# Record R version, platform, locale, and loaded package versions so the
# knitted report is reproducible (output echoed below).
sessionInfo()
## R version 3.5.2 (2018-12-20)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17763)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base     
## 
## loaded via a namespace (and not attached):
##  [1] compiler_3.5.2  magrittr_1.5    tools_3.5.2     htmltools_0.3.6
##  [5] yaml_2.2.0      Rcpp_1.0.0      stringi_1.2.4   rmarkdown_1.11 
##  [9] knitr_1.21      stringr_1.3.1   xfun_0.4        digest_0.6.18  
## [13] evaluate_0.12

User Inputs

# Unpack the R Markdown `params` list into top-level variables so later
# chunks can reference them directly.  Use `<-` (idiomatic R assignment)
# rather than `=` at the top level.
output.var <- params$output.var        # name of the response variable (e.g. "y3")
transform.abs <- params$transform.abs  # logical flag - presumably abs-transform inputs; confirm with report
log.pred <- params$log.pred            # TRUE = model/predict on the log scale
eda <- params$eda                      # TRUE = run exploratory data analysis section

# Flags selecting which base (non-caret) algorithms to run.
algo.forward <- params$algo.forward
algo.backward <- params$algo.backward
algo.stepwise <- params$algo.stepwise
algo.LASSO <- params$algo.LASSO
algo.LARS <- params$algo.LARS

# Flags selecting which caret-wrapped algorithms to run.
algo.forward.caret <- params$algo.forward.caret
algo.backward.caret <- params$algo.backward.caret
algo.stepwise.caret <- params$algo.stepwise.caret
algo.LASSO.caret <- params$algo.LASSO.caret
algo.LARS.caret <- params$algo.LARS.caret

# Echo the full parameter list into the report for traceability.
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 14
##  $ output.var         : chr "y3"
##  $ transform.abs      : logi FALSE
##  $ log.pred           : logi FALSE
##  $ eda                : logi FALSE
##  $ algo.forward       : logi FALSE
##  $ algo.backward      : logi FALSE
##  $ algo.stepwise      : logi FALSE
##  $ algo.LASSO         : logi FALSE
##  $ algo.LARS          : logi FALSE
##  $ algo.forward.caret : logi TRUE
##  $ algo.backward.caret: logi TRUE
##  $ algo.stepwise.caret: logi TRUE
##  $ algo.LASSO.caret   : logi TRUE
##  $ algo.LARS.caret    : logi TRUE
# Setup Labels
# label.names          = name of the variable the model is trained on
# alt.scale.label.name = Alternate Scale variable name
#   - if predicting on log, then alt.scale is normal scale
#   - if predicting on normal scale, then alt.scale is log scale
# The two cases are mutually exclusive, so use a single if/else rather than
# two independent `if` blocks; `log.pred` is already logical, so compare it
# directly instead of against TRUE; prefer paste0() over paste(sep = "").
if (log.pred) {
  label.names <- paste0("log.", output.var)
  alt.scale.label.name <- output.var
} else {
  label.names <- output.var
  alt.scale.label.name <- paste0("log.", output.var)
}

Prepare Data

Read and Clean Features

# Load the feature matrix in two precisions.  Paths are relative to the
# .Rmd's knit directory -- TODO confirm the working directory when running
# interactively.
features = read.csv("../../Data/features.csv")
features.highprec = read.csv("../../Data/features_highprec.csv")
# Compare the rounded and high-precision versions; all.equal() reports the
# mean relative difference per differing column (echoed below).  Note the
# result is auto-printed by knitr, not captured or acted upon.
all.equal(features, features.highprec)
##  [1] "Component \"x11\": Mean relative difference: 0.001401482"     
##  [2] "Component \"stat9\": Mean relative difference: 0.0002946299"  
##  [3] "Component \"stat12\": Mean relative difference: 0.0005151515" 
##  [4] "Component \"stat13\": Mean relative difference: 0.001354369"  
##  [5] "Component \"stat18\": Mean relative difference: 0.0005141104" 
##  [6] "Component \"stat22\": Mean relative difference: 0.001135977"  
##  [7] "Component \"stat25\": Mean relative difference: 0.0001884615" 
##  [8] "Component \"stat29\": Mean relative difference: 0.001083691"  
##  [9] "Component \"stat36\": Mean relative difference: 0.00021513"   
## [10] "Component \"stat37\": Mean relative difference: 0.0004578125" 
## [11] "Component \"stat43\": Mean relative difference: 0.0003473684" 
## [12] "Component \"stat45\": Mean relative difference: 0.0002951699" 
## [13] "Component \"stat46\": Mean relative difference: 0.0009745763" 
## [14] "Component \"stat47\": Mean relative difference: 8.829902e-05" 
## [15] "Component \"stat55\": Mean relative difference: 0.001438066"  
## [16] "Component \"stat57\": Mean relative difference: 0.0001056911" 
## [17] "Component \"stat58\": Mean relative difference: 0.0004882261" 
## [18] "Component \"stat60\": Mean relative difference: 0.0002408377" 
## [19] "Component \"stat62\": Mean relative difference: 0.0004885106" 
## [20] "Component \"stat66\": Mean relative difference: 1.73913e-06"  
## [21] "Component \"stat67\": Mean relative difference: 0.0006265823" 
## [22] "Component \"stat73\": Mean relative difference: 0.003846154"  
## [23] "Component \"stat75\": Mean relative difference: 0.002334906"  
## [24] "Component \"stat83\": Mean relative difference: 0.0005628415" 
## [25] "Component \"stat86\": Mean relative difference: 0.0006104418" 
## [26] "Component \"stat94\": Mean relative difference: 0.001005115"  
## [27] "Component \"stat97\": Mean relative difference: 0.0003551913" 
## [28] "Component \"stat98\": Mean relative difference: 0.0006157635" 
## [29] "Component \"stat106\": Mean relative difference: 0.0008267717"
## [30] "Component \"stat109\": Mean relative difference: 0.0005121359"
## [31] "Component \"stat110\": Mean relative difference: 0.0007615527"
## [32] "Component \"stat111\": Mean relative difference: 0.001336134" 
## [33] "Component \"stat114\": Mean relative difference: 7.680492e-05"
## [34] "Component \"stat117\": Mean relative difference: 0.0002421784"
## [35] "Component \"stat122\": Mean relative difference: 0.0006521084"
## [36] "Component \"stat123\": Mean relative difference: 8.333333e-05"
## [37] "Component \"stat125\": Mean relative difference: 0.002385135" 
## [38] "Component \"stat130\": Mean relative difference: 0.001874016" 
## [39] "Component \"stat132\": Mean relative difference: 0.0003193182"
## [40] "Component \"stat135\": Mean relative difference: 0.0001622517"
## [41] "Component \"stat136\": Mean relative difference: 7.722008e-05"
## [42] "Component \"stat138\": Mean relative difference: 0.0009739953"
## [43] "Component \"stat143\": Mean relative difference: 0.0004845361"
## [44] "Component \"stat146\": Mean relative difference: 0.0005821596"
## [45] "Component \"stat148\": Mean relative difference: 0.0005366922"
## [46] "Component \"stat153\": Mean relative difference: 0.0001557522"
## [47] "Component \"stat154\": Mean relative difference: 0.001351916" 
## [48] "Component \"stat157\": Mean relative difference: 0.0005427928"
## [49] "Component \"stat162\": Mean relative difference: 0.002622951" 
## [50] "Component \"stat167\": Mean relative difference: 0.0005905172"
## [51] "Component \"stat168\": Mean relative difference: 0.0002791096"
## [52] "Component \"stat169\": Mean relative difference: 0.0004121827"
## [53] "Component \"stat170\": Mean relative difference: 0.0004705882"
## [54] "Component \"stat174\": Mean relative difference: 0.0003822894"
## [55] "Component \"stat179\": Mean relative difference: 0.0008286604"
## [56] "Component \"stat184\": Mean relative difference: 0.0007526718"
## [57] "Component \"stat187\": Mean relative difference: 0.0005122768"
## [58] "Component \"stat193\": Mean relative difference: 4.215116e-05"
## [59] "Component \"stat199\": Mean relative difference: 0.002155844" 
## [60] "Component \"stat203\": Mean relative difference: 0.0003738318"
## [61] "Component \"stat213\": Mean relative difference: 0.000667676" 
## [62] "Component \"stat215\": Mean relative difference: 0.0003997955"
# Preview the first rows of the standard-precision feature table.
head(features)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10      x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.05e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.03e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.06e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.47e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.01e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.07e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Preview the first rows of the high-precision feature table (note x11 keeps
# full precision here, vs. the rounded values in `features`).
head(features.highprec)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10          x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.050025e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.034518e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.062312e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.471887e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.010552e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.071662e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Work with the high-precision feature set from here on
features <- features.highprec
#str(features)

Checking correlations to evaluate removal of redundant features

# Pairwise correlations among numeric features, rounded for display.
# vapply (not sapply) keeps the numeric-column mask type-stable even for
# degenerate inputs.
corr.matrix <- round(cor(features[vapply(features, is.numeric, logical(1))]), 2)

# filter out only highly correlated variables
threshold <- 0.6
corr.matrix.tmp <- corr.matrix
diag(corr.matrix.tmp) <- 0  # zero the trivial self-correlations before thresholding
high.corr <- apply(abs(corr.matrix.tmp) >= threshold, 1, any)
high.corr.matrix <- corr.matrix.tmp[high.corr, high.corr]

DT::datatable(corr.matrix)
DT::datatable(high.corr.matrix)

Feature Names

# Feature names are all columns except the job identifier
drops <- c("JobName")
feature.names <- colnames(features)
feature.names <- feature.names[!(feature.names %in% drops)]
#str(feature.names)

Read and Clean Labels

# Load job labels and keep only the identifier plus the selected response
labels <- read.csv("../../Data/labels.csv")
#str(labels)
labels <- labels[, c("JobName", output.var)]
summary(labels)
##       JobName           y3        
##  Job_00001:   1   Min.   : 95.91  
##  Job_00002:   1   1st Qu.:118.21  
##  Job_00003:   1   Median :123.99  
##  Job_00004:   1   Mean   :125.36  
##  Job_00005:   1   3rd Qu.:131.06  
##  Job_00006:   1   Max.   :193.73  
##  (Other)  :9994   NA's   :2497

Merge Datasets

# Join features with labels on the job identifier, then drop the key column
drops <- c('JobName')
data <- merge(features, labels, by = 'JobName')
data <- data[, !(colnames(data) %in% drops)]
#str(data)

Transformations

# Optional label transform: convert dB-scaled labels to absolute magnitude
# (10^(x/20)) and drop extreme rows.
# NOTE(review): the outlier filter hard-codes `y3`, while the response column
# is otherwise parameterized via `output.var`/`label.names` — confirm intended.
if (transform.abs == TRUE){
  data[,label.names] = 10^(data[,label.names]/20)
  data = filter(data, y3 < 1E7)  # presumably dplyr::filter, attached upstream
}


#str(data)
# Optional: model log10 of the alternate-scale label instead, then drop the
# alternate-scale column itself.
# NOTE(review): `label.names` and `alt.scale.label.name` come from the earlier
# "Setup Labels" chunk (not visible here).
if (log.pred == TRUE){
  data[label.names] = log(data[alt.scale.label.name],10)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
#str(data)

Remove NA Cases

data = data[complete.cases(data),]

Exploratory Data Analysis

Check correlation of the label with the features

if (eda) {
  # Correlation of every feature column with the label, rounded for display
  corr.to.label <- round(
    cor(dplyr::select(data, -one_of(label.names)),
        dplyr::select_at(data, label.names)),
    4
  )
  DT::datatable(corr.to.label)
}

Multicollinearity - VIF

if (eda) {
  # Variance inflation factors, largest first, to flag multicollinearity
  vifDF <- usdm::vif(select_at(data, feature.names)) %>% arrange(desc(VIF))
  head(vifDF, 10)
}

Scatterplots

panel.hist <- function(x, ...)
{
    # Diagonal panel for pairs(): draws a scaled histogram of x inside the
    # panel. The user y-range is stretched temporarily so the bars fit and
    # restored when the panel function exits.
    usr <- par("usr"); on.exit(par(usr))
    par(usr = c(usr[1:2], 0, 1.5))
    histo <- hist(x, plot = FALSE)
    edges <- histo$breaks
    n.edges <- length(edges)
    heights <- histo$counts / max(histo$counts)  # scale tallest bar to 1
    rect(edges[-n.edges], 0, edges[-1], heights, col = "cyan", ...)
}
if (eda == TRUE){
  # Histogram of the response column(s).
  # NOTE(review): `histogram` is not base graphics — presumably lattice,
  # attached in an earlier (unseen) chunk; confirm.
  histogram(data[ ,label.names])
  #hist(data[complete.cases(data),alt.scale.label.name])
}
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
# Draw one scatterplot per x variable against yvar.
#   data  - data frame holding all columns
#   xvars - character vector of predictor columns; NULL means every column
#           except yvar
#   yvar  - name of the y-axis column
# Returns NULL invisibly (called for its plotting side effect).
# Fixes vs. original: seq_along() instead of 1:n so zero predictors is a
# no-op rather than an error, and the redundant full copy of `data` is gone.
ind.pairs.plot <- function(data, xvars = NULL, yvar)
{
    if (is.null(xvars)) {
        xvars <- setdiff(names(data), yvar)
    }

    for (i in seq_along(xvars)) {
        plot(data[, xvars[i]], data[, yvar], xlab = xvars[i], ylab = yvar)
    }
    invisible(NULL)
}

if (eda) {
  # One scatterplot per feature against the label
  ind.pairs.plot(data, feature.names, label.names)
}

# 
# pl <- ggplot(data, aes(x=x18, y = y3))
# pl2 <- pl + geom_point(aes(alpha = 0.1)) # default color gradient based on 'hp'
# print(pl2)

Feature Engineering

# Feature engineering is applied only on modeling runs (eda == FALSE), so the
# EDA pass sees the untransformed features.
if(eda ==FALSE){
  # x18 may need transformations — compare raw vs. sqrt scatter vs. the label
  plot(data[,'x18'], data[,label.names], main = "Original Scatter Plot vs. x18", ylab = label.names, xlab = 'x18')
  plot(sqrt(data[,'x18']), data[,label.names], main = "Original Scatter Plot vs. sqrt(x18)", ylab = label.names, xlab = 'sqrt(x18)')
  
  # transforming x18: replace the raw column with its square root
  data$sqrt.x18 = sqrt(data$x18)
  data = dplyr::select(data,-one_of('x18'))
  
  # what about x7, x9?
  # x11 looks like data is at discrete points after a while. Will this be a problem?
}

Modeling

Train Test Split

# Shuffle the rows, then take an 80/20 train/test split on the label
# (sample.split comes from caTools, attached upstream)
data <- data[sample(nrow(data)), ]
split <- sample.split(data[, label.names], SplitRatio = 0.8)

data.train <- subset(data, split)
data.test <- subset(data, !split)

Common Functions

# Draw a suite of regression diagnostics for a fitted lm-style model.
#   model - fitted model (lm-compatible: supports plot, predict, rstandard,
#           rstudent, model.matrix, cooks.distance)
#   train - data used to fit the model (for predictions and the 4/n cutoff)
# Prints counts of high-influence points and returns the Cook's distances.
# Changes vs. original: removed the unused `residuals`/`influence.measures`
# computations, hoisted the 4/n cutoff, used paste0, and counted influential
# points with sum() (NA-safe) instead of length(cd[...]).
plot.diagnostics <- function(model, train) {
  # Base diagnostic plots (residuals vs fitted, QQ, scale-location, leverage)
  plot(model)

  r.standard <- rstandard(model)
  r.student <- rstudent(model)
  fitted.vals <- predict(model, train)

  # Studentized residuals vs predictions
  plot(fitted.vals, r.student,
       ylab = "Student Residuals", xlab = "Predicted Values",
       main = "Student Residual Plot")
  abline(0, 0)

  # Standardized residuals vs predictions, with +/-2 reference bands
  plot(fitted.vals, r.standard,
       ylab = "Standard Residuals", xlab = "Predicted Values",
       main = "Standard Residual Plot")
  abline(0, 0)
  abline(2, 0)
  abline(-2, 0)

  # Distribution of studentized residuals with a standard-normal overlay
  hist(r.student, freq = FALSE, main = "Distribution of Studentized Residuals",
       xlab = "Studentized Residuals", ylab = "Density", ylim = c(0, 0.5))
  xfit <- seq(min(r.student) - 1, max(r.student) + 1, length = 40)
  yfit <- dnorm(xfit)
  lines(xfit, yfit, ylim = c(0, 0.5))

  # Influence diagnostics
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # (verbose option if needed: print(summary(influence.measures(model))))

  # Leverage plot
  lev <- hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')

  # Cook's distance with the conventional 4/n and 1 cutoffs
  cd <- cooks.distance(model)
  plot(cd, ylab = "Cooks distances")
  cutoff <- 4 / nrow(train)
  abline(cutoff, 0)
  abline(1, 0)

  print(paste0("Number of data points that have Cook's D > 4/n: ",
               sum(cd > cutoff, na.rm = TRUE)))
  print(paste0("Number of data points that have Cook's D > 1: ",
               sum(cd > 1, na.rm = TRUE)))
  return(cd)
}

# Train a linear feature-selection model via caret on a parallel backend,
# then print/plot diagnostics for the matched method.
#
# Arguments:
#   formula        full model formula (selection methods pick subsets of it)
#   data           training data frame
#   method         caret method: 'leapForward'/'leapBackward'/'leapSeq',
#                  'glmnet' (with subopt = 'LASSO'), or 'lars'
#   subopt         qualifier; only 'LASSO' (with method 'glmnet') is used
#   feature.names  used only to size the default nvmax grid for leap methods
#   train.control  optional caret::trainControl; defaults to 10-fold CV grid
#                  search with verbose, parallel iteration
#   tune.grid      optional tuning grid; a method-specific default is built
#                  when NULL
#   pre.proc       optional preProcess spec; forced to center/scale for 'lars'
#
# Returns a list(model, id, residPlot, residHistogram or metricsPlot) from the
# branch matching `method`; falls off the end (NULL) for unmatched methods.
#
# NOTE(review): if method == 'glmnet' and subopt is left NULL, the test
# `subopt == 'LASSO'` yields logical(0) and the `if` errors — callers must
# pass subopt explicitly for glmnet.
train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  
  # Default resampling scheme: 10-fold cross-validation over a tuning grid
  if(is.null(train.control)){
    train.control <- trainControl(method = "cv"
                              ,number = 10
                              ,search = "grid"
                              ,verboseIter = TRUE
                              ,allowParallel = TRUE
                              )
  }
  
  # Build a method-appropriate default tuning grid when none was supplied
  if(is.null(tune.grid)){
    if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
      # Try every subset size from 1 up to the full feature count
      tune.grid = data.frame(nvmax = 1:length(feature.names))
    }
    if (method == 'glmnet' && subopt == 'LASSO'){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      # alpha = 1 fixes the elastic-net mixing parameter at pure LASSO
      lambda = 10^seq(-2,0, length =100)
      alpha = c(1)
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (method == 'lars'){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale") 
    }
  }
  
  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  # NOTE(review): detectCores()*0.75 can be fractional — makeCluster is
  # handed a non-integer; confirm the intended worker count.
  cl <- makeCluster(detectCores()*0.75) # use 75% of cores only, leave rest for other tasks
  registerDoParallel(cl)

  set.seed(1) 
  # The seed must be set immediately before caret::train; setting it earlier
  # (outside this function) did not give reproducible results.
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
                              )
  
  stopCluster(cl)
  registerDoSEQ() # restore the sequential foreach backend after training
  
  # --- leap (best-subset) branch: report results, plot residuals, and
  # extract the coefficients of the best subset size ---
  if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    print(model.caret$results) # all model results
    print(model.caret$bestTune) # best model
  
    model = model.caret$finalModel

    # Residuals Plot  MMORO #
    # leap models don't support studentized residuals, so plot raw residuals
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
   
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    
    # Provides the coefficients of the best model (id = best nvmax row name)
    id = rownames(model.caret$bestTune)
    message("Coefficients of final model:")
    print (coef(model, id = id))
    # Need to find alternate to plotting diagnostic plots
    # plot.diagnostics(model.forward,data.train)
    # plot(model.forward,labels = colnames(data.train),scale=c("bic")) ## too many variables
    return(list(model = model,id = id,residPlot = residPlot ,residHistogram=residHistogram))
  }
  # --- glmnet/LASSO branch: plot CV metrics across the lambda grid plus
  # residual diagnostics ---
  if (method == 'glmnet' && subopt == 'LASSO'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Metrics Plot MMORO#
    # Reshape the per-candidate results to long form and facet by metric
    dataPlot = cbind(model.caret$results, id=as.numeric(rownames(model.caret$results))) %>%
      gather(key='metric',value='value',-id) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=id,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot MMORO#
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
    
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)

        id = NULL # not really needed but added for consistency
    # NOTE(review): residHistogram is computed but not included in this
    # branch's return list (unlike the other branches) — confirm intended.
    return(list(model = model.caret,id = id,residPlot = residPlot,metricsPlot=metricsPlot ))
  }
  # --- lars branch: report tuning results plus residual diagnostics ---
  if (method == 'lars'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Residuals Plot  MMORO#
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)

    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id,residPlot = residPlot ,residHistogram=residHistogram))
  }
}

# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# changed slightly, since object$call[[2]] was just returning the symbol "formula" without actually returning the value in formula
# Predict from a regsubsets fit: build the design matrix for `newdata` from
# `formula`, pull the coefficients of the model of size `id`, and multiply
# only the matching columns. (The formula is passed explicitly because
# object$call[[2]] held the symbol `formula` rather than its value.)
predict.regsubsets <- function(object, newdata, id, formula, ...) {
    design <- model.matrix(formula, newdata) # adds intercept, expands interactions
    beta <- coef(object, id = id)
    design[, names(beta)] %*% beta
}
  
# Evaluate a fitted model on a test set: print a prediction summary and test
# MSE, and plot predicted vs. actual with +/- `good` and +/- `ok` tolerance
# bands.
#   model         - fitted model object
#   test          - test data frame (must contain the label column)
#   level         - confidence level for the NULL-method predict call
#   draw.limits   - NOTE(review): currently unused; the tolerance bands are
#                   always drawn (original behavior, kept)
#   good, ok      - relative tolerances for the green/blue reference lines
#   method/subopt - which prediction path to use; NULL means a plain
#                   predict() with a confidence interval
#   id, formula   - for leap methods: best-subset id and the (full) formula
#   feature.names - for glmnet/LASSO: columns forming the design matrix
#   label.names   - name of the label column in `test`
# Returns the test MSE invisibly.
# Fix vs. original: the independent `if (method == ...)` tests ran even when
# method was NULL, where `NULL == '...'` gives logical(0) and the `if` errors
# — so the method = NULL path always crashed. Now an if/else chain.
test.model <- function(model, test, level = 0.95,
                       draw.limits = FALSE, good = 0.1, ok = 0.15,
                       method = NULL, subopt = NULL,
                       id = NULL, formula, feature.names, label.names) {
  if (is.null(method)) {
    # Plain model (e.g. lm): column 1 of the interval matrix is the point fit
    pred <- predict(model, newdata = test, interval = "confidence", level = level)
  } else if (method == 'leapForward' || method == 'leapBackward' || method == 'leapSeq') {
    pred <- predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && identical(subopt, 'LASSO')) {
    # glmnet predicts from a plain numeric matrix, not a formula
    xtest <- as.matrix(test[, feature.names])
    pred <- as.data.frame(predict(model, xtest))
  } else if (method == 'lars') {
    pred <- as.data.frame(predict(model, newdata = test))
  } else {
    stop("Unsupported method: ", method, call. = FALSE)
  }

  # Summary of predicted values
  print ("Summary of predicted values: ")
  print(summary(pred[, 1]))

  test.mse <- mean((test[, label.names] - pred[, 1])^2)
  print(paste(method, subopt, "Test MSE:", test.mse, sep = " "))

  # Predicted vs. actual with relative tolerance bands through the origin
  plot(test[, label.names], pred[, 1], xlab = "Actual", ylab = "Predicted")
  abline(0, (1 + good), col = 'green', lwd = 3)
  abline(0, (1 - good), col = 'green', lwd = 3)
  abline(0, (1 + ok), col = 'blue', lwd = 3)
  abline(0, (1 - ok), col = 'blue', lwd = 3)

  invisible(test.mse)
}

Setup Formulae

# Build "<label(s)> ~ <all other columns>" and an intercept-only baseline
n <- names(data.train)
formula <- as.formula(paste(
  paste(n[n %in% label.names], collapse = " + "),
  " ~",
  paste(n[!n %in% label.names], collapse = " + ")
))
grand.mean.formula <- as.formula(
  paste(paste(n[n %in% label.names], collapse = " + "), " ~ 1")
)
print(formula)
## y3 ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + 
##     x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + x22 + 
##     x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + stat7 + 
##     stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + stat14 + 
##     stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + stat21 + 
##     stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + stat28 + 
##     stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + stat35 + 
##     stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + stat42 + 
##     stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + stat49 + 
##     stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + stat56 + 
##     stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + stat63 + 
##     stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + stat70 + 
##     stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + stat77 + 
##     stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + stat84 + 
##     stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + stat91 + 
##     stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + stat98 + 
##     stat99 + stat100 + stat101 + stat102 + stat103 + stat104 + 
##     stat105 + stat106 + stat107 + stat108 + stat109 + stat110 + 
##     stat111 + stat112 + stat113 + stat114 + stat115 + stat116 + 
##     stat117 + stat118 + stat119 + stat120 + stat121 + stat122 + 
##     stat123 + stat124 + stat125 + stat126 + stat127 + stat128 + 
##     stat129 + stat130 + stat131 + stat132 + stat133 + stat134 + 
##     stat135 + stat136 + stat137 + stat138 + stat139 + stat140 + 
##     stat141 + stat142 + stat143 + stat144 + stat145 + stat146 + 
##     stat147 + stat148 + stat149 + stat150 + stat151 + stat152 + 
##     stat153 + stat154 + stat155 + stat156 + stat157 + stat158 + 
##     stat159 + stat160 + stat161 + stat162 + stat163 + stat164 + 
##     stat165 + stat166 + stat167 + stat168 + stat169 + stat170 + 
##     stat171 + stat172 + stat173 + stat174 + stat175 + stat176 + 
##     stat177 + stat178 + stat179 + stat180 + stat181 + stat182 + 
##     stat183 + stat184 + stat185 + stat186 + stat187 + stat188 + 
##     stat189 + stat190 + stat191 + stat192 + stat193 + stat194 + 
##     stat195 + stat196 + stat197 + stat198 + stat199 + stat200 + 
##     stat201 + stat202 + stat203 + stat204 + stat205 + stat206 + 
##     stat207 + stat208 + stat209 + stat210 + stat211 + stat212 + 
##     stat213 + stat214 + stat215 + stat216 + stat217 + sqrt.x18
print(grand.mean.formula)
## y3 ~ 1
# Refresh feature.names: feature engineering above may have replaced columns
feature.names <- n[!n %in% label.names]

Full & Grand Means Model

# Fit the full linear model (all features) on the training split
model.full <- lm(formula, data = data.train)
summary(model.full)
## 
## Call:
## lm(formula = formula, data = data.train)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -21.316  -6.021  -1.721   4.349  67.462 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  8.522e+01  2.779e+00  30.667  < 2e-16 ***
## x1          -1.189e-01  1.897e-01  -0.626 0.531076    
## x2           1.101e-01  1.212e-01   0.908 0.363991    
## x3           2.443e-02  3.322e-02   0.735 0.462248    
## x4          -1.541e-02  2.639e-03  -5.840 5.51e-09 ***
## x5           1.390e-01  8.585e-02   1.618 0.105614    
## x6           1.296e-01  1.740e-01   0.745 0.456445    
## x7           3.228e+00  1.854e-01  17.408  < 2e-16 ***
## x8           1.199e-01  4.326e-02   2.772 0.005588 ** 
## x9           9.618e-01  9.682e-02   9.934  < 2e-16 ***
## x10          3.595e-01  8.981e-02   4.003 6.34e-05 ***
## x11          7.508e+07  2.163e+07   3.472 0.000521 ***
## x12         -6.883e-02  5.481e-02  -1.256 0.209222    
## x13          3.467e-02  2.184e-02   1.588 0.112436    
## x14         -1.567e-01  9.419e-02  -1.663 0.096317 .  
## x15          1.907e-02  8.973e-02   0.213 0.831690    
## x16          2.757e-01  6.207e-02   4.442 9.07e-06 ***
## x17          4.098e-01  9.429e-02   4.346 1.41e-05 ***
## x19          7.682e-02  4.854e-02   1.582 0.113624    
## x20         -1.570e-01  3.347e-01  -0.469 0.638970    
## x21          3.279e-02  1.235e-02   2.656 0.007934 ** 
## x22         -5.531e-03  1.002e-01  -0.055 0.955998    
## x23          5.966e-02  9.542e-02   0.625 0.531813    
## stat1        3.435e-02  7.214e-02   0.476 0.633963    
## stat2       -9.943e-03  7.160e-02  -0.139 0.889558    
## stat3        1.867e-01  7.192e-02   2.596 0.009455 ** 
## stat4       -1.311e-01  7.248e-02  -1.809 0.070487 .  
## stat5       -2.117e-02  7.235e-02  -0.293 0.769893    
## stat6       -3.144e-02  7.221e-02  -0.435 0.663245    
## stat7       -6.992e-02  7.194e-02  -0.972 0.331076    
## stat8       -8.722e-03  7.243e-02  -0.120 0.904155    
## stat9       -3.917e-02  7.222e-02  -0.542 0.587565    
## stat10      -1.171e-01  7.185e-02  -1.630 0.103158    
## stat11      -4.296e-02  7.285e-02  -0.590 0.555407    
## stat12       6.196e-02  7.189e-02   0.862 0.388819    
## stat13      -1.378e-01  7.239e-02  -1.904 0.057016 .  
## stat14      -2.261e-01  7.181e-02  -3.148 0.001651 ** 
## stat15      -1.172e-01  7.192e-02  -1.629 0.103330    
## stat16      -4.123e-02  7.229e-02  -0.570 0.568409    
## stat17      -4.816e-02  7.160e-02  -0.673 0.501230    
## stat18      -7.754e-02  7.185e-02  -1.079 0.280530    
## stat19       5.221e-02  7.198e-02   0.725 0.468287    
## stat20      -7.757e-02  7.192e-02  -1.078 0.280861    
## stat21      -1.785e-02  7.227e-02  -0.247 0.804922    
## stat22      -1.504e-01  7.245e-02  -2.076 0.037973 *  
## stat23       1.497e-01  7.176e-02   2.087 0.036954 *  
## stat24      -1.349e-01  7.218e-02  -1.868 0.061746 .  
## stat25      -1.464e-01  7.205e-02  -2.032 0.042234 *  
## stat26      -7.900e-02  7.204e-02  -1.097 0.272843    
## stat27       4.882e-02  7.168e-02   0.681 0.495813    
## stat28       4.395e-02  7.231e-02   0.608 0.543356    
## stat29       9.790e-02  7.292e-02   1.342 0.179493    
## stat30       9.607e-02  7.225e-02   1.330 0.183678    
## stat31      -3.239e-02  7.280e-02  -0.445 0.656423    
## stat32      -2.331e-02  7.255e-02  -0.321 0.747954    
## stat33      -1.339e-01  7.186e-02  -1.863 0.062507 .  
## stat34       2.540e-02  7.178e-02   0.354 0.723480    
## stat35      -1.715e-01  7.248e-02  -2.365 0.018044 *  
## stat36       5.769e-03  7.157e-02   0.081 0.935759    
## stat37      -1.023e-01  7.293e-02  -1.402 0.160934    
## stat38       1.242e-01  7.215e-02   1.722 0.085112 .  
## stat39      -3.881e-02  7.184e-02  -0.540 0.589075    
## stat40       1.203e-02  7.208e-02   0.167 0.867450    
## stat41      -9.774e-02  7.155e-02  -1.366 0.172000    
## stat42      -1.187e-01  7.184e-02  -1.652 0.098644 .  
## stat43      -9.748e-02  7.256e-02  -1.343 0.179205    
## stat44       5.667e-02  7.201e-02   0.787 0.431295    
## stat45      -6.296e-02  7.211e-02  -0.873 0.382633    
## stat46       9.301e-02  7.221e-02   1.288 0.197807    
## stat47       6.030e-02  7.261e-02   0.831 0.406251    
## stat48       1.168e-02  7.235e-02   0.162 0.871703    
## stat49       8.379e-02  7.160e-02   1.170 0.241936    
## stat50       6.563e-02  7.169e-02   0.915 0.360013    
## stat51       1.228e-01  7.184e-02   1.709 0.087444 .  
## stat52      -1.334e-02  7.226e-02  -0.185 0.853486    
## stat53      -7.241e-02  7.264e-02  -0.997 0.318883    
## stat54      -1.240e-01  7.238e-02  -1.713 0.086777 .  
## stat55       7.189e-02  7.164e-02   1.004 0.315659    
## stat56      -8.595e-02  7.257e-02  -1.184 0.236288    
## stat57      -7.621e-04  7.146e-02  -0.011 0.991492    
## stat58      -4.609e-02  7.164e-02  -0.643 0.520039    
## stat59       4.980e-02  7.205e-02   0.691 0.489409    
## stat60       1.087e-01  7.216e-02   1.506 0.132174    
## stat61      -8.236e-03  7.228e-02  -0.114 0.909286    
## stat62       6.289e-03  7.211e-02   0.087 0.930504    
## stat63       8.250e-02  7.235e-02   1.140 0.254228    
## stat64      -8.052e-03  7.207e-02  -0.112 0.911040    
## stat65      -6.646e-02  7.285e-02  -0.912 0.361671    
## stat66       3.233e-02  7.292e-02   0.443 0.657503    
## stat67      -1.141e-02  7.246e-02  -0.157 0.874879    
## stat68      -3.092e-02  7.277e-02  -0.425 0.670938    
## stat69       1.965e-02  7.244e-02   0.271 0.786186    
## stat70       7.610e-02  7.177e-02   1.060 0.289040    
## stat71       8.652e-03  7.189e-02   0.120 0.904200    
## stat72       8.593e-02  7.270e-02   1.182 0.237259    
## stat73       7.476e-02  7.210e-02   1.037 0.299843    
## stat74      -3.625e-02  7.227e-02  -0.502 0.616007    
## stat75      -9.702e-02  7.259e-02  -1.336 0.181440    
## stat76       6.088e-02  7.245e-02   0.840 0.400763    
## stat77      -2.619e-02  7.257e-02  -0.361 0.718253    
## stat78      -9.751e-02  7.269e-02  -1.342 0.179801    
## stat79      -2.472e-02  7.250e-02  -0.341 0.733139    
## stat80       6.953e-02  7.252e-02   0.959 0.337691    
## stat81       1.160e-01  7.257e-02   1.598 0.110073    
## stat82       8.191e-02  7.164e-02   1.143 0.252918    
## stat83      -7.892e-02  7.216e-02  -1.094 0.274125    
## stat84       1.153e-02  7.259e-02   0.159 0.873850    
## stat85      -3.971e-02  7.204e-02  -0.551 0.581498    
## stat86       3.675e-02  7.219e-02   0.509 0.610735    
## stat87      -1.269e-01  7.275e-02  -1.745 0.081096 .  
## stat88      -2.576e-02  7.148e-02  -0.360 0.718546    
## stat89      -1.560e-02  7.194e-02  -0.217 0.828368    
## stat90      -2.703e-02  7.219e-02  -0.374 0.708054    
## stat91      -1.129e-01  7.219e-02  -1.563 0.118026    
## stat92      -9.006e-02  7.215e-02  -1.248 0.211988    
## stat93      -1.316e-02  7.303e-02  -0.180 0.856989    
## stat94      -6.372e-02  7.213e-02  -0.883 0.377060    
## stat95      -4.711e-02  7.210e-02  -0.653 0.513571    
## stat96      -7.988e-02  7.226e-02  -1.106 0.268987    
## stat97       4.142e-02  7.173e-02   0.577 0.563654    
## stat98       1.006e+00  7.131e-02  14.107  < 2e-16 ***
## stat99       1.074e-01  7.238e-02   1.484 0.137754    
## stat100      1.722e-01  7.235e-02   2.381 0.017313 *  
## stat101     -6.424e-04  7.256e-02  -0.009 0.992936    
## stat102      4.528e-02  7.210e-02   0.628 0.530030    
## stat103     -9.714e-02  7.332e-02  -1.325 0.185270    
## stat104     -5.732e-02  7.201e-02  -0.796 0.426046    
## stat105      7.047e-02  7.162e-02   0.984 0.325218    
## stat106     -8.684e-02  7.192e-02  -1.207 0.227301    
## stat107      1.789e-02  7.222e-02   0.248 0.804331    
## stat108     -3.693e-02  7.238e-02  -0.510 0.609909    
## stat109      6.567e-02  7.183e-02   0.914 0.360651    
## stat110     -9.488e-01  7.184e-02 -13.207  < 2e-16 ***
## stat111     -1.775e-02  7.255e-02  -0.245 0.806704    
## stat112      2.887e-02  7.256e-02   0.398 0.690772    
## stat113     -8.288e-03  7.290e-02  -0.114 0.909483    
## stat114     -9.862e-04  7.216e-02  -0.014 0.989097    
## stat115      1.421e-02  7.224e-02   0.197 0.844068    
## stat116      3.352e-02  7.264e-02   0.461 0.644514    
## stat117      7.041e-02  7.258e-02   0.970 0.332084    
## stat118     -8.255e-02  7.157e-02  -1.154 0.248748    
## stat119      1.449e-02  7.184e-02   0.202 0.840114    
## stat120      6.957e-02  7.160e-02   0.972 0.331266    
## stat121     -5.835e-03  7.230e-02  -0.081 0.935674    
## stat122     -4.782e-02  7.163e-02  -0.668 0.504478    
## stat123      1.634e-02  7.281e-02   0.224 0.822408    
## stat124     -4.259e-02  7.200e-02  -0.592 0.554155    
## stat125      2.039e-02  7.212e-02   0.283 0.777363    
## stat126      9.105e-02  7.202e-02   1.264 0.206149    
## stat127     -4.363e-02  7.182e-02  -0.607 0.543547    
## stat128     -7.491e-02  7.194e-02  -1.041 0.297792    
## stat129     -2.473e-02  7.168e-02  -0.345 0.730119    
## stat130      5.663e-02  7.228e-02   0.783 0.433387    
## stat131      7.474e-02  7.219e-02   1.035 0.300565    
## stat132      4.500e-02  7.176e-02   0.627 0.530603    
## stat133     -4.470e-02  7.231e-02  -0.618 0.536442    
## stat134     -4.663e-02  7.196e-02  -0.648 0.516989    
## stat135     -1.172e-01  7.197e-02  -1.629 0.103455    
## stat136      2.062e-02  7.224e-02   0.285 0.775287    
## stat137     -5.708e-02  7.175e-02  -0.796 0.426326    
## stat138      5.095e-02  7.228e-02   0.705 0.480953    
## stat139      1.876e-03  7.258e-02   0.026 0.979381    
## stat140     -1.542e-02  7.162e-02  -0.215 0.829519    
## stat141     -1.912e-02  7.143e-02  -0.268 0.788961    
## stat142     -7.433e-05  7.317e-02  -0.001 0.999189    
## stat143     -1.699e-03  7.253e-02  -0.023 0.981316    
## stat144      1.189e-01  7.191e-02   1.653 0.098396 .  
## stat145      1.595e-02  7.307e-02   0.218 0.827243    
## stat146     -1.458e-01  7.255e-02  -2.010 0.044458 *  
## stat147     -6.926e-02  7.330e-02  -0.945 0.344714    
## stat148     -4.804e-02  7.138e-02  -0.673 0.500954    
## stat149     -1.829e-01  7.240e-02  -2.526 0.011565 *  
## stat150      8.040e-03  7.236e-02   0.111 0.911532    
## stat151     -1.457e-01  7.260e-02  -2.007 0.044747 *  
## stat152     -5.063e-02  7.140e-02  -0.709 0.478261    
## stat153      4.820e-02  7.315e-02   0.659 0.509941    
## stat154     -3.323e-02  7.257e-02  -0.458 0.647045    
## stat155      2.093e-03  7.197e-02   0.029 0.976797    
## stat156      1.988e-01  7.282e-02   2.730 0.006360 ** 
## stat157     -8.144e-03  7.189e-02  -0.113 0.909803    
## stat158      6.675e-03  7.267e-02   0.092 0.926815    
## stat159     -6.094e-02  7.206e-02  -0.846 0.397732    
## stat160      7.174e-02  7.212e-02   0.995 0.319883    
## stat161      7.426e-02  7.276e-02   1.021 0.307483    
## stat162      4.334e-03  7.189e-02   0.060 0.951926    
## stat163      3.394e-02  7.287e-02   0.466 0.641448    
## stat164      3.794e-02  7.245e-02   0.524 0.600487    
## stat165     -1.177e-03  7.130e-02  -0.017 0.986830    
## stat166     -1.653e-02  7.138e-02  -0.232 0.816910    
## stat167      2.500e-02  7.202e-02   0.347 0.728487    
## stat168     -2.420e-02  7.198e-02  -0.336 0.736724    
## stat169      6.857e-03  7.228e-02   0.095 0.924423    
## stat170     -3.190e-02  7.194e-02  -0.443 0.657487    
## stat171      2.773e-02  7.255e-02   0.382 0.702264    
## stat172      5.776e-02  7.198e-02   0.802 0.422356    
## stat173     -6.558e-02  7.197e-02  -0.911 0.362282    
## stat174      1.219e-03  7.226e-02   0.017 0.986544    
## stat175     -5.246e-02  7.253e-02  -0.723 0.469504    
## stat176      5.785e-02  7.188e-02   0.805 0.420965    
## stat177      2.425e-02  7.208e-02   0.336 0.736574    
## stat178     -8.375e-02  7.315e-02  -1.145 0.252303    
## stat179     -1.299e-03  7.202e-02  -0.018 0.985605    
## stat180     -4.253e-02  7.188e-02  -0.592 0.554120    
## stat181      1.334e-02  7.272e-02   0.183 0.854450    
## stat182      2.010e-02  7.244e-02   0.277 0.781454    
## stat183      3.772e-02  7.187e-02   0.525 0.599693    
## stat184      2.767e-02  7.279e-02   0.380 0.703835    
## stat185     -4.170e-02  7.150e-02  -0.583 0.559778    
## stat186     -5.193e-02  7.259e-02  -0.715 0.474442    
## stat187     -8.226e-02  7.193e-02  -1.144 0.252842    
## stat188      1.036e-02  7.187e-02   0.144 0.885377    
## stat189      1.040e-01  7.220e-02   1.440 0.149896    
## stat190     -2.728e-02  7.189e-02  -0.380 0.704330    
## stat191     -6.334e-02  7.249e-02  -0.874 0.382286    
## stat192      3.060e-02  7.295e-02   0.419 0.674898    
## stat193     -5.849e-02  7.319e-02  -0.799 0.424285    
## stat194     -4.147e-04  7.204e-02  -0.006 0.995407    
## stat195      1.284e-01  7.221e-02   1.777 0.075546 .  
## stat196     -9.123e-02  7.306e-02  -1.249 0.211792    
## stat197      9.586e-03  7.132e-02   0.134 0.893094    
## stat198     -5.146e-02  7.260e-02  -0.709 0.478495    
## stat199      6.333e-02  7.150e-02   0.886 0.375782    
## stat200     -1.215e-01  7.182e-02  -1.691 0.090881 .  
## stat201     -1.595e-02  7.186e-02  -0.222 0.824323    
## stat202      9.775e-03  7.301e-02   0.134 0.893492    
## stat203     -2.082e-02  7.200e-02  -0.289 0.772499    
## stat204     -6.213e-02  7.169e-02  -0.867 0.386124    
## stat205     -9.848e-02  7.152e-02  -1.377 0.168569    
## stat206      3.709e-02  7.272e-02   0.510 0.610070    
## stat207      8.896e-02  7.215e-02   1.233 0.217601    
## stat208      3.766e-02  7.247e-02   0.520 0.603276    
## stat209     -9.522e-02  7.147e-02  -1.332 0.182833    
## stat210     -8.013e-02  7.263e-02  -1.103 0.269957    
## stat211     -4.448e-02  7.203e-02  -0.618 0.536859    
## stat212      9.513e-02  7.245e-02   1.313 0.189203    
## stat213     -6.251e-02  7.293e-02  -0.857 0.391430    
## stat214     -1.139e-01  7.186e-02  -1.584 0.113136    
## stat215     -9.396e-02  7.235e-02  -1.299 0.194067    
## stat216     -5.133e-02  7.171e-02  -0.716 0.474159    
## stat217      8.278e-02  7.273e-02   1.138 0.255090    
## sqrt.x18     7.641e+00  2.777e-01  27.520  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 9.488 on 5761 degrees of freedom
## Multiple R-squared:  0.2536, Adjusted R-squared:  0.2225 
## F-statistic: 8.156 on 240 and 5761 DF,  p-value: < 2.2e-16
# Diagnostic plots for the full model. plot.diagnostics is a project helper;
# judging by how cd.full is indexed below, it presumably returns per-observation
# Cook's distances named by row — TODO confirm against its definition.
cd.full = plot.diagnostics(model.full, data.train)

## [1] "Number of data points that have Cook's D > 4/n: 303"
## [1] "Number of data points that have Cook's D > 1: 0"

Checking the model after removing high-influence points (Cook's distance > 4/n)

# Refit the full model after dropping high-influence rows: flag any observation
# whose Cook's distance exceeds the conventional 4/n cutoff, remove it from the
# training set, and fit the same formula on what remains.
cd.cutoff = 4 / nrow(data.train)
high.cd = names(cd.full[cd.full > cd.cutoff])
keep.rows = !(rownames(data.train) %in% high.cd)
data.train2 = data.train[keep.rows, ]
model.full2 = lm(formula , data.train2)
summary(model.full2)
## 
## Call:
## lm(formula = formula, data = data.train2)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -17.5270  -4.9838  -0.9342   4.6059  20.4523 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  8.233e+01  2.181e+00  37.759  < 2e-16 ***
## x1          -1.733e-01  1.490e-01  -1.163 0.245036    
## x2           5.628e-02  9.510e-02   0.592 0.553992    
## x3           1.371e-02  2.598e-02   0.528 0.597748    
## x4          -1.725e-02  2.070e-03  -8.331  < 2e-16 ***
## x5           1.830e-01  6.733e-02   2.718 0.006580 ** 
## x6           5.320e-02  1.362e-01   0.391 0.696179    
## x7           3.301e+00  1.452e-01  22.730  < 2e-16 ***
## x8           1.515e-01  3.394e-02   4.465 8.17e-06 ***
## x9           9.603e-01  7.573e-02  12.681  < 2e-16 ***
## x10          4.641e-01  7.057e-02   6.576 5.28e-11 ***
## x11          8.519e+07  1.699e+07   5.015 5.47e-07 ***
## x12         -2.265e-02  4.286e-02  -0.528 0.597298    
## x13          4.055e-02  1.716e-02   2.363 0.018139 *  
## x14         -3.078e-02  7.387e-02  -0.417 0.676946    
## x15          5.229e-02  7.032e-02   0.744 0.457172    
## x16          2.839e-01  4.874e-02   5.825 6.04e-09 ***
## x17          3.926e-01  7.396e-02   5.308 1.15e-07 ***
## x19          6.528e-02  3.805e-02   1.716 0.086260 .  
## x20         -1.442e-01  2.634e-01  -0.547 0.584076    
## x21          3.512e-02  9.704e-03   3.619 0.000299 ***
## x22         -6.834e-02  7.848e-02  -0.871 0.383878    
## x23          1.513e-01  7.483e-02   2.023 0.043166 *  
## stat1       -1.717e-02  5.645e-02  -0.304 0.761042    
## stat2        1.910e-03  5.616e-02   0.034 0.972869    
## stat3        1.705e-01  5.640e-02   3.023 0.002519 ** 
## stat4       -1.473e-01  5.699e-02  -2.585 0.009755 ** 
## stat5       -5.727e-02  5.679e-02  -1.008 0.313316    
## stat6       -4.762e-02  5.660e-02  -0.841 0.400167    
## stat7       -7.918e-02  5.640e-02  -1.404 0.160420    
## stat8       -4.496e-02  5.669e-02  -0.793 0.427719    
## stat9       -2.249e-02  5.662e-02  -0.397 0.691313    
## stat10      -9.334e-02  5.619e-02  -1.661 0.096723 .  
## stat11      -5.395e-02  5.707e-02  -0.945 0.344548    
## stat12       5.690e-02  5.637e-02   1.009 0.312884    
## stat13      -1.290e-01  5.674e-02  -2.274 0.022993 *  
## stat14      -2.846e-01  5.629e-02  -5.056 4.43e-07 ***
## stat15      -1.757e-01  5.645e-02  -3.113 0.001863 ** 
## stat16      -5.217e-02  5.660e-02  -0.922 0.356686    
## stat17      -6.864e-02  5.622e-02  -1.221 0.222159    
## stat18      -3.782e-02  5.630e-02  -0.672 0.501768    
## stat19       5.834e-02  5.653e-02   1.032 0.302172    
## stat20       2.367e-02  5.657e-02   0.418 0.675678    
## stat21      -3.028e-02  5.663e-02  -0.535 0.592852    
## stat22      -8.904e-02  5.666e-02  -1.571 0.116130    
## stat23       1.262e-01  5.643e-02   2.236 0.025412 *  
## stat24      -8.274e-02  5.672e-02  -1.459 0.144667    
## stat25      -9.419e-02  5.648e-02  -1.668 0.095448 .  
## stat26      -9.620e-02  5.665e-02  -1.698 0.089523 .  
## stat27       2.617e-02  5.634e-02   0.464 0.642336    
## stat28      -7.098e-03  5.668e-02  -0.125 0.900340    
## stat29       8.518e-02  5.715e-02   1.490 0.136167    
## stat30       3.401e-02  5.659e-02   0.601 0.547885    
## stat31       1.849e-02  5.709e-02   0.324 0.746114    
## stat32      -3.827e-02  5.704e-02  -0.671 0.502248    
## stat33      -1.026e-01  5.631e-02  -1.821 0.068586 .  
## stat34       5.147e-02  5.628e-02   0.914 0.360510    
## stat35      -1.887e-01  5.703e-02  -3.308 0.000945 ***
## stat36      -5.304e-03  5.627e-02  -0.094 0.924898    
## stat37      -3.345e-02  5.725e-02  -0.584 0.559000    
## stat38       1.019e-01  5.662e-02   1.799 0.072015 .  
## stat39      -6.446e-02  5.627e-02  -1.146 0.251969    
## stat40      -1.504e-02  5.665e-02  -0.265 0.790665    
## stat41      -1.209e-01  5.606e-02  -2.157 0.031074 *  
## stat42      -6.799e-02  5.642e-02  -1.205 0.228210    
## stat43      -5.030e-02  5.701e-02  -0.882 0.377646    
## stat44       5.521e-02  5.664e-02   0.975 0.329732    
## stat45      -2.807e-02  5.660e-02  -0.496 0.619923    
## stat46       4.865e-02  5.665e-02   0.859 0.390524    
## stat47       9.746e-02  5.687e-02   1.714 0.086642 .  
## stat48      -2.115e-02  5.663e-02  -0.373 0.708821    
## stat49      -2.966e-02  5.619e-02  -0.528 0.597657    
## stat50       7.867e-02  5.626e-02   1.398 0.162087    
## stat51       9.322e-02  5.637e-02   1.654 0.098238 .  
## stat52       9.607e-03  5.678e-02   0.169 0.865645    
## stat53      -5.435e-02  5.695e-02  -0.954 0.340009    
## stat54      -1.444e-01  5.694e-02  -2.535 0.011258 *  
## stat55       5.412e-02  5.624e-02   0.962 0.335933    
## stat56      -3.024e-02  5.691e-02  -0.531 0.595215    
## stat57      -2.451e-02  5.618e-02  -0.436 0.662699    
## stat58      -3.533e-02  5.611e-02  -0.630 0.528869    
## stat59       3.972e-02  5.642e-02   0.704 0.481472    
## stat60       1.346e-01  5.667e-02   2.376 0.017541 *  
## stat61      -3.762e-02  5.665e-02  -0.664 0.506647    
## stat62      -3.950e-02  5.649e-02  -0.699 0.484406    
## stat63       4.731e-02  5.677e-02   0.833 0.404754    
## stat64       7.265e-02  5.649e-02   1.286 0.198517    
## stat65      -1.788e-02  5.708e-02  -0.313 0.754076    
## stat66       4.383e-02  5.723e-02   0.766 0.443835    
## stat67       4.010e-02  5.676e-02   0.707 0.479902    
## stat68      -2.036e-02  5.698e-02  -0.357 0.720811    
## stat69       8.631e-04  5.690e-02   0.015 0.987899    
## stat70       6.250e-02  5.635e-02   1.109 0.267430    
## stat71       3.215e-02  5.642e-02   0.570 0.568849    
## stat72       6.220e-02  5.703e-02   1.091 0.275448    
## stat73       4.650e-02  5.668e-02   0.820 0.411983    
## stat74       2.407e-02  5.670e-02   0.424 0.671257    
## stat75      -4.298e-02  5.688e-02  -0.756 0.449907    
## stat76       3.911e-02  5.675e-02   0.689 0.490781    
## stat77       1.921e-02  5.691e-02   0.338 0.735728    
## stat78      -1.345e-01  5.689e-02  -2.364 0.018115 *  
## stat79       5.042e-02  5.662e-02   0.891 0.373180    
## stat80       1.188e-01  5.689e-02   2.088 0.036847 *  
## stat81       7.690e-02  5.687e-02   1.352 0.176375    
## stat82       1.786e-02  5.613e-02   0.318 0.750369    
## stat83      -1.020e-01  5.654e-02  -1.804 0.071254 .  
## stat84      -1.645e-02  5.686e-02  -0.289 0.772298    
## stat85      -8.330e-02  5.650e-02  -1.474 0.140467    
## stat86       7.068e-02  5.653e-02   1.250 0.211253    
## stat87      -1.030e-01  5.709e-02  -1.805 0.071205 .  
## stat88       4.346e-03  5.605e-02   0.078 0.938190    
## stat89       2.349e-02  5.650e-02   0.416 0.677597    
## stat90      -4.190e-02  5.666e-02  -0.740 0.459612    
## stat91      -9.869e-02  5.659e-02  -1.744 0.081239 .  
## stat92      -9.736e-02  5.656e-02  -1.721 0.085234 .  
## stat93       9.146e-03  5.746e-02   0.159 0.873534    
## stat94       2.590e-03  5.635e-02   0.046 0.963341    
## stat95       4.993e-02  5.662e-02   0.882 0.377857    
## stat96      -8.883e-02  5.674e-02  -1.565 0.117535    
## stat97       5.612e-02  5.628e-02   0.997 0.318721    
## stat98       9.570e-01  5.581e-02  17.147  < 2e-16 ***
## stat99       1.007e-01  5.676e-02   1.775 0.076030 .  
## stat100      2.060e-01  5.682e-02   3.625 0.000291 ***
## stat101      1.161e-02  5.699e-02   0.204 0.838640    
## stat102      5.043e-02  5.659e-02   0.891 0.372877    
## stat103     -1.002e-01  5.735e-02  -1.748 0.080525 .  
## stat104     -2.431e-02  5.650e-02  -0.430 0.667019    
## stat105      5.114e-02  5.630e-02   0.908 0.363700    
## stat106     -1.161e-01  5.635e-02  -2.060 0.039422 *  
## stat107      2.936e-02  5.664e-02   0.518 0.604196    
## stat108     -2.173e-02  5.684e-02  -0.382 0.702281    
## stat109      2.337e-02  5.644e-02   0.414 0.678822    
## stat110     -8.765e-01  5.621e-02 -15.595  < 2e-16 ***
## stat111      1.941e-02  5.682e-02   0.342 0.732690    
## stat112      4.378e-02  5.702e-02   0.768 0.442654    
## stat113      1.286e-02  5.723e-02   0.225 0.822204    
## stat114      4.387e-03  5.670e-02   0.077 0.938333    
## stat115      1.714e-02  5.667e-02   0.302 0.762353    
## stat116      4.712e-03  5.697e-02   0.083 0.934088    
## stat117      4.790e-02  5.677e-02   0.844 0.398853    
## stat118      1.369e-02  5.619e-02   0.244 0.807595    
## stat119      6.867e-02  5.621e-02   1.222 0.221872    
## stat120      1.861e-02  5.614e-02   0.332 0.740276    
## stat121     -1.926e-02  5.669e-02  -0.340 0.733994    
## stat122     -5.576e-02  5.632e-02  -0.990 0.322162    
## stat123      6.433e-02  5.698e-02   1.129 0.258996    
## stat124     -5.398e-02  5.651e-02  -0.955 0.339476    
## stat125      7.872e-03  5.655e-02   0.139 0.889301    
## stat126      7.442e-02  5.648e-02   1.318 0.187710    
## stat127     -5.734e-02  5.631e-02  -1.018 0.308594    
## stat128     -1.552e-01  5.631e-02  -2.756 0.005864 ** 
## stat129     -7.582e-02  5.602e-02  -1.353 0.175953    
## stat130      6.331e-02  5.666e-02   1.117 0.263935    
## stat131      5.242e-02  5.657e-02   0.927 0.354167    
## stat132      2.324e-02  5.622e-02   0.413 0.679385    
## stat133      3.119e-02  5.679e-02   0.549 0.582861    
## stat134     -4.484e-02  5.632e-02  -0.796 0.425936    
## stat135     -9.357e-02  5.656e-02  -1.654 0.098145 .  
## stat136     -1.189e-02  5.660e-02  -0.210 0.833664    
## stat137     -6.399e-02  5.620e-02  -1.139 0.254919    
## stat138      6.995e-03  5.669e-02   0.123 0.901809    
## stat139     -3.478e-02  5.697e-02  -0.610 0.541571    
## stat140     -1.041e-03  5.595e-02  -0.019 0.985157    
## stat141      2.275e-03  5.604e-02   0.041 0.967622    
## stat142      5.783e-03  5.738e-02   0.101 0.919733    
## stat143     -1.414e-02  5.692e-02  -0.248 0.803847    
## stat144      1.264e-01  5.633e-02   2.245 0.024824 *  
## stat145     -2.727e-02  5.740e-02  -0.475 0.634800    
## stat146     -1.437e-01  5.697e-02  -2.523 0.011676 *  
## stat147     -3.730e-02  5.754e-02  -0.648 0.516891    
## stat148     -3.522e-02  5.611e-02  -0.628 0.530215    
## stat149     -1.504e-01  5.695e-02  -2.640 0.008312 ** 
## stat150     -6.953e-03  5.685e-02  -0.122 0.902670    
## stat151     -1.010e-02  5.708e-02  -0.177 0.859553    
## stat152     -4.102e-02  5.593e-02  -0.733 0.463391    
## stat153      1.124e-01  5.725e-02   1.963 0.049648 *  
## stat154      1.379e-02  5.698e-02   0.242 0.808765    
## stat155      5.200e-02  5.653e-02   0.920 0.357684    
## stat156      1.595e-01  5.698e-02   2.799 0.005152 ** 
## stat157     -2.478e-02  5.631e-02  -0.440 0.659865    
## stat158      6.226e-02  5.699e-02   1.093 0.274657    
## stat159     -3.028e-02  5.649e-02  -0.536 0.591964    
## stat160      5.717e-02  5.674e-02   1.008 0.313674    
## stat161      1.543e-02  5.708e-02   0.270 0.786911    
## stat162      1.620e-02  5.625e-02   0.288 0.773367    
## stat163      5.557e-02  5.722e-02   0.971 0.331537    
## stat164      1.066e-02  5.693e-02   0.187 0.851480    
## stat165      6.126e-03  5.587e-02   0.110 0.912686    
## stat166     -6.989e-03  5.596e-02  -0.125 0.900609    
## stat167     -4.880e-02  5.644e-02  -0.864 0.387358    
## stat168     -4.721e-03  5.639e-02  -0.084 0.933274    
## stat169      1.377e-02  5.685e-02   0.242 0.808643    
## stat170     -6.810e-03  5.644e-02  -0.121 0.903960    
## stat171     -3.359e-02  5.692e-02  -0.590 0.555093    
## stat172      1.552e-01  5.627e-02   2.759 0.005820 ** 
## stat173     -5.526e-03  5.638e-02  -0.098 0.921925    
## stat174      8.405e-02  5.673e-02   1.482 0.138514    
## stat175     -5.982e-02  5.689e-02  -1.052 0.293027    
## stat176     -1.567e-02  5.635e-02  -0.278 0.780907    
## stat177     -2.701e-02  5.653e-02  -0.478 0.632867    
## stat178     -3.555e-02  5.733e-02  -0.620 0.535208    
## stat179      1.319e-02  5.649e-02   0.233 0.815412    
## stat180      1.280e-02  5.653e-02   0.227 0.820818    
## stat181      4.074e-02  5.701e-02   0.715 0.474905    
## stat182      2.430e-02  5.682e-02   0.428 0.668867    
## stat183      2.400e-02  5.654e-02   0.425 0.671167    
## stat184      6.090e-02  5.702e-02   1.068 0.285583    
## stat185      1.438e-02  5.613e-02   0.256 0.797846    
## stat186      3.119e-02  5.697e-02   0.548 0.584045    
## stat187     -5.633e-02  5.634e-02  -1.000 0.317488    
## stat188      3.122e-02  5.636e-02   0.554 0.579679    
## stat189     -1.384e-02  5.673e-02  -0.244 0.807189    
## stat190     -3.895e-02  5.641e-02  -0.691 0.489892    
## stat191     -6.925e-02  5.672e-02  -1.221 0.222196    
## stat192      3.332e-02  5.737e-02   0.581 0.561425    
## stat193     -3.352e-02  5.744e-02  -0.584 0.559534    
## stat194     -1.533e-02  5.660e-02  -0.271 0.786562    
## stat195      4.372e-02  5.668e-02   0.771 0.440490    
## stat196     -1.121e-01  5.731e-02  -1.957 0.050443 .  
## stat197      1.283e-02  5.600e-02   0.229 0.818858    
## stat198     -1.345e-02  5.692e-02  -0.236 0.813224    
## stat199      4.462e-02  5.609e-02   0.795 0.426419    
## stat200     -4.998e-02  5.639e-02  -0.886 0.375539    
## stat201      5.954e-02  5.649e-02   1.054 0.291978    
## stat202      4.380e-02  5.738e-02   0.763 0.445299    
## stat203      5.013e-03  5.641e-02   0.089 0.929183    
## stat204      9.333e-04  5.629e-02   0.017 0.986771    
## stat205     -2.797e-02  5.590e-02  -0.500 0.616887    
## stat206      4.053e-03  5.705e-02   0.071 0.943371    
## stat207      1.066e-01  5.658e-02   1.885 0.059547 .  
## stat208      7.693e-02  5.683e-02   1.354 0.175909    
## stat209     -7.615e-02  5.598e-02  -1.360 0.173763    
## stat210     -1.174e-01  5.694e-02  -2.062 0.039271 *  
## stat211     -5.320e-02  5.656e-02  -0.941 0.346899    
## stat212      1.053e-01  5.685e-02   1.853 0.063992 .  
## stat213     -4.642e-02  5.708e-02  -0.813 0.416068    
## stat214     -5.050e-02  5.640e-02  -0.895 0.370679    
## stat215     -6.850e-02  5.671e-02  -1.208 0.227136    
## stat216     -7.457e-02  5.619e-02  -1.327 0.184565    
## stat217      7.166e-02  5.705e-02   1.256 0.209125    
## sqrt.x18     7.293e+00  2.169e-01  33.628  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 7.243 on 5458 degrees of freedom
## Multiple R-squared:  0.3597, Adjusted R-squared:  0.3316 
## F-statistic: 12.78 on 240 and 5458 DF,  p-value: < 2.2e-16
# Re-run the diagnostics on the filtered fit. Note that new points now exceed
# the 4/n Cook's D rule: the threshold is relative, so removing influential
# rows exposes a fresh set of relatively influential ones.
cd.full2 = plot.diagnostics(model.full2, data.train2)

## [1] "Number of data points that have Cook's D > 4/n: 317"
## [1] "Number of data points that have Cook's D > 1: 0"
# Residuals are much closer to normal after removing the high-influence points.
# Compare the target's distribution between the flagged (Cook's D > 4/n) rows
# and the remaining rows using side-by-side boxplots.
# NOTE(review): "MMORO" below is an author marker of unclear meaning.
# High Leverage Plot MMORO ### 
plotData = data.train %>% 
  rownames_to_column() %>%
  mutate(type=ifelse(rowname %in% high.cd,'High','Normal')) %>%
  # one_of(label.names) pulls the label column and renames it `target`;
  # one_of() is superseded by any_of() in newer dplyr.
  dplyr::select(type,target=one_of(label.names))

ggplot(data=plotData, aes(x=type,y=target)) +
  geom_boxplot(fill='light blue',outlier.shape=NA) +
  scale_y_continuous(name="Target Variable Values") +
  theme_light() +
  ggtitle('Distribution of High Leverage Points and Normal  Points')

# Intercept-only ("grand mean") baseline models — the lower bound of the
# stepwise search scope — fitted on both the full and the influence-filtered
# training sets.
model.null = lm(formula = grand.mean.formula, data = data.train)
model.null2 = lm(formula = grand.mean.formula, data = data.train2)
# Baseline diagnostics are uninteresting (every fitted value is the mean), so
# the summary/plot calls are left disabled:
# summary(model.null); plot.diagnostics(model.null, data.train)
# summary(model.null2); plot.diagnostics(model.null2, data.train2)

Variable Selection

References — basic stepwise selection: http://www.stat.columbia.edu/~martin/W2024/R10.pdf ; cross-validation and other selection metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/

Forward Selection (w/ full train)

Train

# Forward stepwise AIC search: grow from the intercept-only model toward the
# full model one term at a time; trace = 0 suppresses the per-step log.
if (algo.forward) {
  t1 = Sys.time()
  
  model.forward = step(model.null, scope=list(lower=model.null, upper=model.full), direction="forward", trace = 0)
  print(summary(model.forward))
  #saveRDS(model.forward,file = "model_forward.rds")
  
  t2 = Sys.time()
  # Report elapsed time in explicit seconds: a bare `t2 - t1` is a difftime
  # whose units are chosen automatically (secs vs mins), which would make the
  # printed number ambiguous.
  print(paste0("Time taken for Forward Selection: ",
               round(as.numeric(difftime(t2, t1, units = "secs")), 2), " secs"))
  
  plot.diagnostics(model.forward, data.train)
}

Test

# Evaluate the forward-selected model on the held-out test set. test.model is
# a project helper — presumably prints/returns test-set metrics; TODO confirm.
if (algo.forward == TRUE){
  test.model(model.forward, data.test, "Forward Selection")
}

Forward Selection (w/ filtered train)

Train

# Same forward AIC search, but on the influence-filtered training set
# (data.train2) with the matching null/full model pair.
if (algo.forward) {
  t1 = Sys.time()
  
  model.forward2 = step(model.null2, scope=list(lower=model.null2, upper=model.full2), direction="forward", trace = 0)
  print(summary(model.forward2))
  #saveRDS(model.forward2, file = "model_forward2.rds")
  
  t2 = Sys.time()
  # difftime with explicit units: a bare `t2 - t1` auto-selects secs/mins,
  # so the printed number would be ambiguous.
  print(paste0("Time taken for Forward Selection: ",
               round(as.numeric(difftime(t2, t1, units = "secs")), 2), " secs"))
  
  plot.diagnostics(model.forward2, data.train2)
}

Test

# Evaluate the filtered-train forward model on the (unfiltered) test set, so
# both variants are scored against the same held-out data.
if (algo.forward == TRUE){
  test.model(model.forward2, data.test, "Forward Selection (2)")
}

Forward Selection with CV (w/ full train)

Train

# Forward selection with cross-validation via caret's "leapForward" method.
# train.caret.glmselect is a project helper; judging by the usage here it
# returns a list with the fitted model ($model) and a selection id ($id) —
# presumably the chosen subset size (nvmax), per the output below. TODO confirm.
# NOTE(review): this reuses the name `model.forward` from the non-caret branch;
# per the params in the header only one of the two branches runs per render.
if (algo.forward.caret == TRUE){
  set.seed(1)  # reproducible CV folds
  returned = train.caret.glmselect(formula = formula
                                   , data = data.train
                                   , method = "leapForward"
                                   , feature.names = feature.names)
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 9 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.190312 0.1031351 7.782556 0.2986960 0.01510985 0.1630465
## 2       2  9.959863 0.1431490 7.586895 0.3329232 0.01527567 0.1507478
## 3       3  9.800520 0.1706239 7.425932 0.3371193 0.02089715 0.1510136
## 4       4  9.664292 0.1935352 7.225063 0.3244736 0.02143874 0.1193851
## 5       5  9.581672 0.2073775 7.156896 0.3424689 0.02081535 0.1340120
## 6       6  9.555257 0.2117661 7.137916 0.3542029 0.02335306 0.1291911
## 7       7  9.559823 0.2111601 7.139880 0.3644482 0.02461551 0.1382767
## 8       8  9.547579 0.2133689 7.128788 0.3651827 0.02538635 0.1329768
## 9       9  9.521929 0.2176251 7.121917 0.3669530 0.02593030 0.1336222
## 10     10  9.527194 0.2165947 7.122196 0.3669707 0.02380088 0.1352945
## 11     11  9.533718 0.2154997 7.126706 0.3659198 0.02332300 0.1364556
## 12     12  9.541785 0.2142511 7.134061 0.3701140 0.02326252 0.1373844
## 13     13  9.534556 0.2154392 7.126870 0.3663178 0.02296114 0.1288239
## 14     14  9.536855 0.2150780 7.132556 0.3624797 0.02219343 0.1294582
## 15     15  9.544673 0.2138847 7.140628 0.3656304 0.02279245 0.1329859
## 16     16  9.548190 0.2133363 7.143860 0.3634162 0.02313565 0.1290948
## 17     17  9.547461 0.2135171 7.142876 0.3634191 0.02422124 0.1328883
## 18     18  9.546594 0.2136676 7.135894 0.3605831 0.02424289 0.1301032
## 19     19  9.553059 0.2127053 7.138343 0.3651610 0.02352383 0.1367271
## 20     20  9.555736 0.2122956 7.140058 0.3743162 0.02413164 0.1405056
## 21     21  9.561053 0.2114441 7.140462 0.3671524 0.02313402 0.1393820
## 22     22  9.567029 0.2104881 7.143979 0.3613730 0.02287908 0.1367451
## 23     23  9.569907 0.2100712 7.145186 0.3597307 0.02282651 0.1348296
## 24     24  9.568610 0.2102891 7.146945 0.3559698 0.02231344 0.1286278
## 25     25  9.570651 0.2100758 7.150189 0.3612367 0.02278664 0.1332887
## 26     26  9.577704 0.2090346 7.154186 0.3580719 0.02246616 0.1293691
## 27     27  9.585817 0.2078512 7.157706 0.3552423 0.02238196 0.1206788
## 28     28  9.585758 0.2078746 7.161134 0.3498644 0.02133356 0.1182772
## 29     29  9.580744 0.2087338 7.162245 0.3521365 0.02106913 0.1266043
## 30     30  9.584115 0.2083005 7.159470 0.3571480 0.02166722 0.1264669
## 31     31  9.584694 0.2083126 7.156782 0.3590226 0.02226605 0.1281233
## 32     32  9.591922 0.2072105 7.160248 0.3560463 0.02219070 0.1282975
## 33     33  9.591912 0.2071763 7.161583 0.3523344 0.02191520 0.1262565
## 34     34  9.594758 0.2067533 7.160464 0.3533566 0.02244919 0.1275399
## 35     35  9.596076 0.2065766 7.163635 0.3554220 0.02232660 0.1329547
## 36     36  9.601605 0.2058608 7.166604 0.3599576 0.02313390 0.1352185
## 37     37  9.601458 0.2058715 7.164302 0.3620149 0.02284633 0.1355047
## 38     38  9.606253 0.2051556 7.168000 0.3628587 0.02337773 0.1359634
## 39     39  9.613165 0.2041734 7.172073 0.3637382 0.02395070 0.1336498
## 40     40  9.612140 0.2043253 7.171335 0.3657131 0.02430348 0.1358571
## 41     41  9.614169 0.2040861 7.172957 0.3657866 0.02428820 0.1331279
## 42     42  9.618102 0.2034609 7.175862 0.3652833 0.02432718 0.1301330
## 43     43  9.617503 0.2035606 7.175770 0.3625543 0.02352357 0.1296137
## 44     44  9.622275 0.2028919 7.179171 0.3649725 0.02388955 0.1296032
## 45     45  9.623562 0.2026764 7.181906 0.3645599 0.02389107 0.1302145
## 46     46  9.624637 0.2025793 7.184697 0.3657331 0.02455393 0.1307823
## 47     47  9.626766 0.2023368 7.184396 0.3606111 0.02420667 0.1287695
## 48     48  9.629852 0.2019212 7.186625 0.3661030 0.02460895 0.1324383
## 49     49  9.631442 0.2017224 7.187370 0.3677386 0.02487627 0.1343018
## 50     50  9.633429 0.2014822 7.189338 0.3689351 0.02514875 0.1361730
## 51     51  9.633500 0.2014781 7.187020 0.3727567 0.02492024 0.1406510
## 52     52  9.633858 0.2014100 7.188314 0.3719338 0.02500061 0.1430702
## 53     53  9.635954 0.2011123 7.190101 0.3722002 0.02520282 0.1456194
## 54     54  9.634913 0.2013316 7.190383 0.3762696 0.02490972 0.1469680
## 55     55  9.639867 0.2005876 7.194815 0.3745718 0.02458046 0.1462264
## 56     56  9.640370 0.2005462 7.195547 0.3747820 0.02447106 0.1450342
## 57     57  9.640278 0.2006251 7.194436 0.3772459 0.02497970 0.1476734
## 58     58  9.640730 0.2006538 7.192236 0.3799309 0.02538957 0.1484714
## 59     59  9.643140 0.2003552 7.191819 0.3797841 0.02568398 0.1456626
## 60     60  9.646030 0.1999172 7.193926 0.3761683 0.02544456 0.1447778
## 61     61  9.645396 0.2000499 7.192895 0.3788296 0.02577229 0.1479905
## 62     62  9.645775 0.2000357 7.192688 0.3806347 0.02638377 0.1504098
## 63     63  9.648081 0.1997552 7.193887 0.3832513 0.02664467 0.1499468
## 64     64  9.648056 0.1997535 7.195396 0.3834703 0.02653265 0.1515904
## 65     65  9.653291 0.1990229 7.200218 0.3843425 0.02664970 0.1486311
## 66     66  9.655890 0.1986798 7.201645 0.3838828 0.02675754 0.1485676
## 67     67  9.660885 0.1979388 7.204202 0.3818752 0.02672453 0.1450314
## 68     68  9.661112 0.1979223 7.205839 0.3832127 0.02679069 0.1453881
## 69     69  9.662726 0.1976998 7.207036 0.3830148 0.02677433 0.1483025
## 70     70  9.661397 0.1979233 7.208805 0.3843482 0.02712970 0.1488407
## 71     71  9.657704 0.1985033 7.206002 0.3832684 0.02765511 0.1485234
## 72     72  9.659117 0.1983164 7.206804 0.3805959 0.02737396 0.1451662
## 73     73  9.660051 0.1982110 7.207478 0.3781174 0.02696333 0.1461217
## 74     74  9.656189 0.1988211 7.205106 0.3750728 0.02697428 0.1444182
## 75     75  9.657752 0.1986187 7.206594 0.3726178 0.02682895 0.1406861
## 76     76  9.656887 0.1987346 7.207392 0.3731074 0.02724559 0.1429581
## 77     77  9.657065 0.1987327 7.208809 0.3769309 0.02777211 0.1462181
## 78     78  9.654443 0.1991392 7.207076 0.3755743 0.02740035 0.1450983
## 79     79  9.653936 0.1992575 7.205748 0.3760624 0.02714004 0.1472669
## 80     80  9.655731 0.1990162 7.207818 0.3759409 0.02715385 0.1466851
## 81     81  9.656612 0.1989131 7.212387 0.3751331 0.02721510 0.1457882
## 82     82  9.657930 0.1986935 7.212153 0.3728322 0.02704303 0.1447523
## 83     83  9.656148 0.1990423 7.210242 0.3744432 0.02708590 0.1450533
## 84     84  9.657864 0.1987719 7.211229 0.3729513 0.02752105 0.1445157
## 85     85  9.656558 0.1989565 7.211668 0.3734294 0.02742783 0.1452895
## 86     86  9.659612 0.1985154 7.212725 0.3731781 0.02725987 0.1455269
## 87     87  9.659710 0.1985069 7.213575 0.3708576 0.02718415 0.1413769
## 88     88  9.659733 0.1985202 7.215472 0.3715364 0.02720419 0.1428229
## 89     89  9.660332 0.1984327 7.216783 0.3748874 0.02691612 0.1466108
## 90     90  9.662418 0.1981178 7.219421 0.3736731 0.02643426 0.1461225
## 91     91  9.664895 0.1978136 7.222656 0.3741156 0.02650486 0.1475062
## 92     92  9.666222 0.1976615 7.223575 0.3744376 0.02680975 0.1480623
## 93     93  9.665207 0.1978235 7.223310 0.3740169 0.02652000 0.1476459
## 94     94  9.665527 0.1977830 7.223259 0.3739703 0.02639028 0.1468178
## 95     95  9.666274 0.1977068 7.223393 0.3742601 0.02680615 0.1467439
## 96     96  9.668904 0.1973610 7.226200 0.3766940 0.02715291 0.1487929
## 97     97  9.670021 0.1972349 7.229517 0.3759462 0.02698818 0.1512850
## 98     98  9.669770 0.1972726 7.228988 0.3746698 0.02684557 0.1487039
## 99     99  9.667566 0.1976221 7.227026 0.3724769 0.02666623 0.1461540
## 100   100  9.669438 0.1972805 7.227301 0.3687506 0.02627433 0.1413936
## 101   101  9.668415 0.1974428 7.225324 0.3678849 0.02603839 0.1418567
## 102   102  9.670006 0.1972481 7.226519 0.3693829 0.02618033 0.1416083
## 103   103  9.670609 0.1971385 7.226098 0.3675273 0.02602361 0.1426602
## 104   104  9.671923 0.1969543 7.226936 0.3687076 0.02625159 0.1440590
## 105   105  9.674497 0.1965847 7.229092 0.3698114 0.02628531 0.1446997
## 106   106  9.676923 0.1962206 7.230791 0.3713623 0.02611367 0.1428147
## 107   107  9.675875 0.1963902 7.230901 0.3728132 0.02597156 0.1449720
## 108   108  9.674884 0.1965531 7.230305 0.3725895 0.02621726 0.1427636
## 109   109  9.675274 0.1964821 7.231180 0.3704748 0.02597504 0.1405073
## 110   110  9.674544 0.1965813 7.229658 0.3713310 0.02621712 0.1408943
## 111   111  9.677012 0.1962342 7.232546 0.3717977 0.02611334 0.1433704
## 112   112  9.674944 0.1965438 7.230859 0.3740123 0.02601916 0.1449972
## 113   113  9.676475 0.1963464 7.231152 0.3736176 0.02630214 0.1456916
## 114   114  9.677576 0.1962070 7.231732 0.3712863 0.02605806 0.1435913
## 115   115  9.677012 0.1963460 7.231458 0.3728400 0.02592823 0.1444111
## 116   116  9.676276 0.1964540 7.229905 0.3725118 0.02585930 0.1446005
## 117   117  9.678735 0.1961191 7.233395 0.3719715 0.02588181 0.1451454
## 118   118  9.680218 0.1959363 7.234785 0.3723326 0.02576141 0.1472421
## 119   119  9.681827 0.1957305 7.235779 0.3729595 0.02570926 0.1470729
## 120   120  9.682841 0.1956245 7.235084 0.3747620 0.02574213 0.1465332
## 121   121  9.680165 0.1960285 7.232991 0.3749349 0.02578371 0.1467600
## 122   122  9.680338 0.1959818 7.233852 0.3740434 0.02575467 0.1455684
## 123   123  9.679851 0.1960386 7.232884 0.3739309 0.02590075 0.1456088
## 124   124  9.679517 0.1960630 7.234197 0.3732653 0.02589064 0.1441667
## 125   125  9.679797 0.1960469 7.232572 0.3745588 0.02597541 0.1452430
## 126   126  9.681696 0.1957838 7.235384 0.3751557 0.02618535 0.1486945
## 127   127  9.683066 0.1955898 7.236222 0.3769115 0.02638189 0.1495159
## 128   128  9.683244 0.1955838 7.236174 0.3766098 0.02639277 0.1475455
## 129   129  9.682156 0.1957152 7.234992 0.3761024 0.02617242 0.1477342
## 130   130  9.679873 0.1960749 7.232954 0.3777093 0.02655298 0.1473652
## 131   131  9.680832 0.1959415 7.233239 0.3783178 0.02652563 0.1483980
## 132   132  9.682183 0.1957413 7.234667 0.3772545 0.02612950 0.1481081
## 133   133  9.682852 0.1956337 7.235687 0.3772961 0.02598661 0.1484968
## 134   134  9.682135 0.1957408 7.234182 0.3767895 0.02578065 0.1471160
## 135   135  9.681984 0.1957749 7.234144 0.3769634 0.02603126 0.1461319
## 136   136  9.682092 0.1957888 7.233458 0.3785924 0.02584179 0.1472106
## 137   137  9.680633 0.1959767 7.233465 0.3755268 0.02557742 0.1442306
## 138   138  9.682240 0.1957699 7.234706 0.3765690 0.02566859 0.1464049
## 139   139  9.681407 0.1959216 7.234710 0.3765648 0.02578628 0.1479266
## 140   140  9.682422 0.1957832 7.234829 0.3742799 0.02576371 0.1478646
## 141   141  9.682320 0.1958055 7.234153 0.3741008 0.02572093 0.1474447
## 142   142  9.683602 0.1956211 7.236086 0.3745302 0.02602403 0.1466535
## 143   143  9.683360 0.1956865 7.236106 0.3755910 0.02603166 0.1478499
## 144   144  9.681895 0.1958923 7.236474 0.3753036 0.02581147 0.1466070
## 145   145  9.681408 0.1959560 7.236027 0.3759618 0.02567959 0.1471163
## 146   146  9.680240 0.1961188 7.235564 0.3762055 0.02571366 0.1473606
## 147   147  9.681180 0.1959886 7.236953 0.3750248 0.02580177 0.1473971
## 148   148  9.680989 0.1960258 7.236346 0.3762565 0.02595279 0.1476328
## 149   149  9.680616 0.1961022 7.236221 0.3757605 0.02589842 0.1481153
## 150   150  9.680501 0.1961302 7.236832 0.3762366 0.02578039 0.1488214
## 151   151  9.680096 0.1961836 7.237687 0.3769730 0.02586685 0.1497990
## 152   152  9.681319 0.1960228 7.237745 0.3776543 0.02589516 0.1509132
## 153   153  9.681315 0.1960128 7.238560 0.3769734 0.02572953 0.1510188
## 154   154  9.682736 0.1958313 7.238897 0.3776345 0.02577622 0.1515574
## 155   155  9.683671 0.1956986 7.239102 0.3781517 0.02591398 0.1526464
## 156   156  9.682044 0.1959471 7.236943 0.3797238 0.02607105 0.1518539
## 157   157  9.681826 0.1959812 7.237306 0.3795880 0.02593964 0.1530218
## 158   158  9.682366 0.1959083 7.237584 0.3793416 0.02595388 0.1532978
## 159   159  9.682142 0.1959360 7.237522 0.3797319 0.02591756 0.1555065
## 160   160  9.683183 0.1958027 7.238487 0.3811166 0.02606500 0.1575206
## 161   161  9.682619 0.1958709 7.237839 0.3812803 0.02598641 0.1572813
## 162   162  9.682657 0.1958697 7.238062 0.3826543 0.02610187 0.1580685
## 163   163  9.684104 0.1956679 7.239605 0.3832054 0.02611777 0.1584951
## 164   164  9.684468 0.1956153 7.240093 0.3844726 0.02628972 0.1594596
## 165   165  9.685239 0.1955401 7.240834 0.3861512 0.02649717 0.1617747
## 166   166  9.685611 0.1954958 7.240482 0.3868206 0.02652134 0.1622021
## 167   167  9.686584 0.1953500 7.241813 0.3864569 0.02641284 0.1622098
## 168   168  9.686982 0.1952818 7.242410 0.3858577 0.02649058 0.1617837
## 169   169  9.686765 0.1953150 7.242031 0.3865102 0.02651786 0.1618337
## 170   170  9.686280 0.1953863 7.241232 0.3862339 0.02652712 0.1623714
## 171   171  9.687362 0.1952295 7.242423 0.3862484 0.02647315 0.1625833
## 172   172  9.687044 0.1952997 7.242323 0.3869297 0.02655304 0.1629222
## 173   173  9.686713 0.1953475 7.243110 0.3860050 0.02640683 0.1625164
## 174   174  9.687112 0.1952805 7.242960 0.3848697 0.02624733 0.1609496
## 175   175  9.686989 0.1952873 7.243054 0.3859782 0.02638948 0.1610834
## 176   176  9.686239 0.1954047 7.242812 0.3854318 0.02633570 0.1619176
## 177   177  9.686351 0.1953923 7.242835 0.3863048 0.02644548 0.1626731
## 178   178  9.685664 0.1955017 7.242902 0.3868096 0.02639358 0.1627857
## 179   179  9.686208 0.1954293 7.242615 0.3863294 0.02639091 0.1625460
## 180   180  9.686355 0.1954096 7.243495 0.3867918 0.02651731 0.1629684
## 181   181  9.686808 0.1953423 7.243515 0.3874856 0.02654529 0.1633505
## 182   182  9.686790 0.1953524 7.243981 0.3872055 0.02653180 0.1629637
## 183   183  9.686817 0.1953382 7.243777 0.3883113 0.02658392 0.1633002
## 184   184  9.687119 0.1952944 7.243958 0.3885119 0.02661661 0.1639789
## 185   185  9.687813 0.1951927 7.243795 0.3884990 0.02649325 0.1633818
## 186   186  9.688211 0.1951377 7.244252 0.3882137 0.02658535 0.1634145
## 187   187  9.688021 0.1951567 7.244709 0.3883256 0.02647927 0.1635080
## 188   188  9.687956 0.1951555 7.244578 0.3881964 0.02650465 0.1627712
## 189   189  9.688004 0.1951603 7.244425 0.3889150 0.02663579 0.1625926
## 190   190  9.688076 0.1951638 7.244673 0.3886770 0.02667861 0.1631713
## 191   191  9.688152 0.1951387 7.244515 0.3884012 0.02660577 0.1628320
## 192   192  9.688401 0.1950949 7.244572 0.3891510 0.02652817 0.1629919
## 193   193  9.687904 0.1951694 7.244322 0.3892888 0.02649695 0.1622726
## 194   194  9.687649 0.1951928 7.244775 0.3883878 0.02640326 0.1616146
## 195   195  9.688339 0.1950877 7.245150 0.3883161 0.02639373 0.1613334
## 196   196  9.688866 0.1950068 7.245723 0.3872090 0.02626664 0.1603593
## 197   197  9.688727 0.1950292 7.245219 0.3877946 0.02633177 0.1608195
## 198   198  9.688526 0.1950561 7.245291 0.3886547 0.02647466 0.1616773
## 199   199  9.688780 0.1950051 7.245503 0.3880839 0.02639268 0.1614288
## 200   200  9.688499 0.1950587 7.245310 0.3883286 0.02642996 0.1617848
## 201   201  9.687820 0.1951452 7.245034 0.3882991 0.02640802 0.1614124
## 202   202  9.687569 0.1951773 7.244901 0.3878133 0.02637511 0.1611594
## 203   203  9.686927 0.1952879 7.244520 0.3879466 0.02633738 0.1610318
## 204   204  9.686414 0.1953600 7.244124 0.3883948 0.02626145 0.1618459
## 205   205  9.687399 0.1952105 7.245148 0.3883870 0.02625712 0.1618595
## 206   206  9.687710 0.1951654 7.245192 0.3887386 0.02632865 0.1621402
## 207   207  9.688172 0.1950941 7.245498 0.3890626 0.02626465 0.1621547
## 208   208  9.688607 0.1950285 7.245797 0.3882448 0.02615836 0.1612334
## 209   209  9.689119 0.1949577 7.246420 0.3884732 0.02616674 0.1614659
## 210   210  9.689269 0.1949389 7.246530 0.3886716 0.02617977 0.1621111
## 211   211  9.689826 0.1948728 7.247238 0.3892447 0.02630422 0.1624583
## 212   212  9.689435 0.1949366 7.246922 0.3893973 0.02636018 0.1625413
## 213   213  9.689538 0.1949256 7.247005 0.3894320 0.02637767 0.1625640
## 214   214  9.689415 0.1949425 7.246903 0.3896395 0.02636160 0.1627810
## 215   215  9.689401 0.1949461 7.246948 0.3896609 0.02639384 0.1630768
## 216   216  9.689344 0.1949616 7.246815 0.3899756 0.02644800 0.1631647
## 217   217  9.689429 0.1949494 7.246887 0.3898691 0.02639327 0.1629683
## 218   218  9.689373 0.1949570 7.246839 0.3900079 0.02637740 0.1633455
## 219   219  9.689319 0.1949659 7.246775 0.3899994 0.02635385 0.1632509
## 220   220  9.689599 0.1949281 7.246868 0.3903022 0.02639606 0.1634259
## 221   221  9.689639 0.1949253 7.246781 0.3905719 0.02643279 0.1637414
## 222   222  9.689458 0.1949562 7.246537 0.3907013 0.02646966 0.1639004
## 223   223  9.689571 0.1949439 7.246763 0.3908481 0.02649005 0.1639418
## 224   224  9.689345 0.1949736 7.246635 0.3909025 0.02648784 0.1639960
## 225   225  9.689357 0.1949749 7.246645 0.3907742 0.02647186 0.1637769
## 226   226  9.689329 0.1949781 7.246578 0.3907709 0.02646672 0.1637307
## 227   227  9.689298 0.1949824 7.246563 0.3908538 0.02647663 0.1637775
## 228   228  9.689258 0.1949882 7.246508 0.3908131 0.02647922 0.1638110
## 229   229  9.689107 0.1950106 7.246410 0.3908175 0.02649283 0.1637711
## 230   230  9.689072 0.1950141 7.246445 0.3908223 0.02647661 0.1638339
## 231   231  9.689184 0.1949993 7.246531 0.3908957 0.02648865 0.1639251
## 232   232  9.689154 0.1950023 7.246441 0.3908435 0.02647868 0.1638115
## 233   233  9.689027 0.1950207 7.246308 0.3908113 0.02647664 0.1636636
## 234   234  9.689106 0.1950121 7.246375 0.3909201 0.02649116 0.1637975
## 235   235  9.689050 0.1950188 7.246326 0.3908231 0.02648090 0.1637609
## 236   236  9.689049 0.1950187 7.246335 0.3908174 0.02648176 0.1637727
## 237   237  9.689064 0.1950160 7.246321 0.3908130 0.02648343 0.1636916
## 238   238  9.689039 0.1950197 7.246260 0.3907944 0.02648188 0.1636317
## 239   239  9.689024 0.1950218 7.246221 0.3907945 0.02647965 0.1636074
## 240   240  9.689033 0.1950205 7.246237 0.3907893 0.02647943 0.1636052
##   nvmax
## 9     9
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

## (Intercept)          x4          x7          x9         x10         x16 
##  96.8126929  -0.0150172   3.1893069   0.9833780   0.3546913   0.2736344 
##         x17      stat98     stat110    sqrt.x18 
##   0.4098622   1.0203902  -0.9352919   7.5838139

Test

# Evaluate the trained forward-selection model on the held-out test set.
# `test.model` is a project helper; it prints a summary of predictions and
# the test MSE. `id` comes from the matching train chunk above.
if (isTRUE(algo.forward.caret)) {
  test.model(
    model.forward, data.test,
    method = "leapForward", subopt = NULL,
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    id = id,
    draw.limits = TRUE  # include prediction-limit bands in the diagnostic plot
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.0   121.5   125.1   125.2   128.9   139.6 
## [1] "leapForward  Test MSE: 93.5015990422204"

Forward Selection with CV (w/ filtered train)

Train

# Train a forward-selection linear model via caret (method "leapForward")
# on the FILTERED training set (`data.train2`), tuning nvmax by CV.
# NOTE(review): this reuses the names `model.forward` and `id`, overwriting
# the full-train results from the earlier chunk — confirm this is intended.
if (isTRUE(algo.forward.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "leapForward",
    feature.names = feature.names
  )
  model.forward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 12 on full training set
##     nvmax     RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 8.210706 0.1435527 6.621521 0.2159950 0.04104185 0.1629293
## 2       2 7.907521 0.2044789 6.401711 0.1810466 0.03408236 0.1252524
## 3       3 7.730850 0.2397063 6.231490 0.1702484 0.03156176 0.1466516
## 4       4 7.579309 0.2686629 6.047799 0.1662627 0.02641710 0.1393506
## 5       5 7.472423 0.2890728 5.963578 0.1893338 0.02674589 0.1533025
## 6       6 7.429208 0.2974001 5.937806 0.1973226 0.03031935 0.1632963
## 7       7 7.421047 0.2990612 5.944857 0.2024926 0.03113068 0.1582367
## 8       8 7.405777 0.3017923 5.945689 0.2200323 0.02788967 0.1637757
## 9       9 7.381234 0.3064082 5.925445 0.2139996 0.02569411 0.1632100
## 10     10 7.355087 0.3112072 5.905256 0.2165269 0.02544152 0.1609382
## 11     11 7.347002 0.3127548 5.898460 0.2124384 0.02549727 0.1574845
## 12     12 7.323722 0.3170669 5.885196 0.2180591 0.02639073 0.1655038
## 13     13 7.335574 0.3148895 5.891887 0.2195827 0.02640011 0.1674149
## 14     14 7.345672 0.3131137 5.900332 0.2103454 0.02695146 0.1597171
## 15     15 7.352930 0.3117901 5.905366 0.2127308 0.02664154 0.1632576
## 16     16 7.349612 0.3123829 5.899477 0.2079828 0.02672457 0.1604098
## 17     17 7.350855 0.3121643 5.903189 0.2077732 0.02674355 0.1594806
## 18     18 7.347928 0.3127276 5.900945 0.2039758 0.02683764 0.1552881
## 19     19 7.345422 0.3131971 5.901939 0.2035440 0.02681367 0.1547685
## 20     20 7.345476 0.3131817 5.901046 0.2038044 0.02736545 0.1574471
## 21     21 7.349809 0.3124226 5.905339 0.2068412 0.02796785 0.1555892
## 22     22 7.353301 0.3117828 5.910233 0.2077222 0.02796162 0.1567299
## 23     23 7.350447 0.3122982 5.907742 0.2082143 0.02733948 0.1608305
## 24     24 7.351292 0.3121751 5.908418 0.1977818 0.02641761 0.1535538
## 25     25 7.348412 0.3127932 5.908860 0.1973640 0.02721450 0.1578856
## 26     26 7.346088 0.3131915 5.906595 0.2023151 0.02594672 0.1622380
## 27     27 7.341448 0.3140556 5.902763 0.1999064 0.02591066 0.1654854
## 28     28 7.341409 0.3141103 5.904814 0.1983559 0.02638199 0.1631696
## 29     29 7.344570 0.3135537 5.908566 0.2043333 0.02586944 0.1638663
## 30     30 7.337272 0.3149170 5.902934 0.2081019 0.02632564 0.1696805
## 31     31 7.339954 0.3143990 5.907023 0.2010437 0.02513110 0.1591003
## 32     32 7.338828 0.3146080 5.905023 0.1981834 0.02489989 0.1548595
## 33     33 7.340442 0.3143516 5.905613 0.1986481 0.02576533 0.1555812
## 34     34 7.339249 0.3145844 5.905758 0.1990795 0.02655642 0.1529258
## 35     35 7.333354 0.3156875 5.901900 0.2014619 0.02702514 0.1555310
## 36     36 7.333004 0.3158009 5.904721 0.1994457 0.02677559 0.1541190
## 37     37 7.334284 0.3156385 5.908957 0.2026955 0.02717091 0.1587769
## 38     38 7.339179 0.3148081 5.912869 0.2055029 0.02845126 0.1561775
## 39     39 7.341424 0.3144611 5.913329 0.2052467 0.02923917 0.1554866
## 40     40 7.341302 0.3145265 5.911474 0.2063875 0.02924161 0.1575707
## 41     41 7.339212 0.3148899 5.911257 0.2062505 0.02858444 0.1577731
## 42     42 7.334771 0.3157442 5.909842 0.2098109 0.02887430 0.1616841
## 43     43 7.330881 0.3165587 5.906739 0.2110611 0.02959591 0.1621227
## 44     44 7.330660 0.3166361 5.905356 0.2146160 0.02969334 0.1636379
## 45     45 7.332318 0.3163169 5.904870 0.2155994 0.02990663 0.1658423
## 46     46 7.335940 0.3157156 5.908043 0.2179529 0.03019418 0.1683663
## 47     47 7.338713 0.3152849 5.908739 0.2202552 0.02981379 0.1702454
## 48     48 7.337127 0.3155711 5.908653 0.2132602 0.02882862 0.1656404
## 49     49 7.339354 0.3151311 5.910835 0.2122415 0.02839482 0.1666956
## 50     50 7.341343 0.3148032 5.913037 0.2105101 0.02755805 0.1673228
## 51     51 7.347014 0.3138861 5.917526 0.2070770 0.02748263 0.1657969
## 52     52 7.348548 0.3136027 5.917372 0.2119691 0.02766501 0.1699366
## 53     53 7.349969 0.3133871 5.915963 0.2127308 0.02796766 0.1687381
## 54     54 7.348148 0.3137159 5.915305 0.2135976 0.02781948 0.1678732
## 55     55 7.349728 0.3134384 5.916046 0.2162987 0.02799902 0.1715290
## 56     56 7.350533 0.3132753 5.920117 0.2170062 0.02769102 0.1737787
## 57     57 7.346865 0.3139529 5.917891 0.2125971 0.02743232 0.1702003
## 58     58 7.348898 0.3136586 5.920302 0.2106860 0.02771760 0.1677900
## 59     59 7.351876 0.3131572 5.919656 0.2101919 0.02732032 0.1683262
## 60     60 7.350944 0.3132959 5.919795 0.2132280 0.02767001 0.1691422
## 61     61 7.349537 0.3135740 5.919420 0.2163433 0.02779645 0.1730599
## 62     62 7.352547 0.3130798 5.922649 0.2141062 0.02775987 0.1737448
## 63     63 7.357916 0.3122024 5.929277 0.2164698 0.02857631 0.1744018
## 64     64 7.359869 0.3118823 5.929364 0.2157908 0.02833421 0.1725059
## 65     65 7.358468 0.3121649 5.928877 0.2172207 0.02876804 0.1721852
## 66     66 7.360398 0.3118027 5.930720 0.2140258 0.02812917 0.1683571
## 67     67 7.361002 0.3116943 5.929518 0.2143389 0.02789813 0.1697342
## 68     68 7.361350 0.3116833 5.930041 0.2156880 0.02799230 0.1722620
## 69     69 7.364436 0.3111078 5.931791 0.2112461 0.02731678 0.1680345
## 70     70 7.365518 0.3109746 5.930390 0.2090120 0.02750382 0.1678104
## 71     71 7.369721 0.3102297 5.934351 0.2059851 0.02720962 0.1647506
## 72     72 7.371019 0.3100168 5.935680 0.2079457 0.02763605 0.1655495
## 73     73 7.374104 0.3095516 5.938459 0.2086111 0.02793325 0.1650390
## 74     74 7.377201 0.3090178 5.941761 0.2099726 0.02809755 0.1668959
## 75     75 7.379647 0.3086448 5.944194 0.2094977 0.02795823 0.1659315
## 76     76 7.383294 0.3080129 5.947810 0.2118769 0.02823034 0.1682780
## 77     77 7.382272 0.3082000 5.947028 0.2102373 0.02818868 0.1682123
## 78     78 7.382930 0.3080967 5.946906 0.2112225 0.02803185 0.1704661
## 79     79 7.383526 0.3080548 5.947646 0.2117285 0.02826488 0.1712769
## 80     80 7.379734 0.3087316 5.943942 0.2122409 0.02865982 0.1700245
## 81     81 7.382146 0.3083327 5.945189 0.2122066 0.02886470 0.1696894
## 82     82 7.383048 0.3081864 5.946996 0.2137014 0.02876615 0.1703414
## 83     83 7.382408 0.3083009 5.947515 0.2132370 0.02837202 0.1697996
## 84     84 7.382527 0.3082899 5.946491 0.2137091 0.02881893 0.1702228
## 85     85 7.386831 0.3076032 5.948646 0.2162227 0.02919082 0.1724817
## 86     86 7.390024 0.3070393 5.950134 0.2168975 0.02910381 0.1736816
## 87     87 7.393075 0.3064922 5.952956 0.2181638 0.02907931 0.1771422
## 88     88 7.395706 0.3060763 5.955065 0.2221182 0.02952021 0.1789199
## 89     89 7.396010 0.3060240 5.955468 0.2228927 0.02948217 0.1787030
## 90     90 7.395843 0.3060698 5.954322 0.2244342 0.02954382 0.1784098
## 91     91 7.395439 0.3062006 5.956545 0.2221296 0.02964889 0.1765482
## 92     92 7.395111 0.3062180 5.957119 0.2224497 0.02965059 0.1771266
## 93     93 7.395447 0.3061514 5.956933 0.2229133 0.02914025 0.1803980
## 94     94 7.397219 0.3058495 5.958723 0.2222710 0.02889928 0.1793395
## 95     95 7.398455 0.3056162 5.958927 0.2213394 0.02880050 0.1779367
## 96     96 7.398054 0.3056970 5.959826 0.2205338 0.02859383 0.1774233
## 97     97 7.398230 0.3056905 5.959827 0.2202494 0.02850483 0.1771058
## 98     98 7.400987 0.3052538 5.962618 0.2200650 0.02869157 0.1771830
## 99     99 7.401423 0.3051859 5.962281 0.2210156 0.02884942 0.1771876
## 100   100 7.400593 0.3053067 5.962134 0.2225481 0.02930859 0.1795700
## 101   101 7.397973 0.3057652 5.958868 0.2221525 0.02921483 0.1793920
## 102   102 7.401043 0.3052679 5.961390 0.2230279 0.02932580 0.1794153
## 103   103 7.401544 0.3051647 5.960926 0.2199773 0.02904498 0.1784884
## 104   104 7.402901 0.3049448 5.960173 0.2198653 0.02912460 0.1789233
## 105   105 7.405201 0.3045634 5.961234 0.2182963 0.02875614 0.1783200
## 106   106 7.404674 0.3046553 5.960616 0.2169539 0.02861411 0.1773657
## 107   107 7.404978 0.3046251 5.961294 0.2163561 0.02855247 0.1769511
## 108   108 7.406907 0.3042977 5.962732 0.2163468 0.02849613 0.1764914
## 109   109 7.407707 0.3041842 5.963161 0.2153492 0.02836355 0.1751726
## 110   110 7.408199 0.3041131 5.963411 0.2145124 0.02833625 0.1754337
## 111   111 7.408834 0.3040052 5.964914 0.2136624 0.02811942 0.1760201
## 112   112 7.408927 0.3039923 5.965263 0.2125634 0.02833275 0.1755539
## 113   113 7.408641 0.3040236 5.964321 0.2122241 0.02787324 0.1758141
## 114   114 7.407328 0.3042460 5.963387 0.2125920 0.02786768 0.1761180
## 115   115 7.406649 0.3043341 5.963299 0.2128449 0.02809992 0.1759698
## 116   116 7.407164 0.3042226 5.964389 0.2119891 0.02763679 0.1757725
## 117   117 7.406921 0.3042581 5.964435 0.2113928 0.02754201 0.1756619
## 118   118 7.407195 0.3042418 5.964595 0.2111792 0.02775732 0.1762279
## 119   119 7.407373 0.3041924 5.964632 0.2119075 0.02794395 0.1765687
## 120   120 7.408267 0.3040644 5.965225 0.2122478 0.02801621 0.1762963
## 121   121 7.409286 0.3039067 5.965699 0.2125318 0.02804013 0.1767425
## 122   122 7.408948 0.3039924 5.964448 0.2115096 0.02789023 0.1765780
## 123   123 7.408194 0.3041171 5.964188 0.2113012 0.02779145 0.1773857
## 124   124 7.409094 0.3039572 5.964693 0.2116369 0.02751003 0.1773255
## 125   125 7.410755 0.3036824 5.965984 0.2131579 0.02731548 0.1789723
## 126   126 7.411260 0.3036035 5.965828 0.2115631 0.02734584 0.1778229
## 127   127 7.413210 0.3032829 5.967406 0.2103382 0.02714991 0.1777489
## 128   128 7.412094 0.3034751 5.966311 0.2098752 0.02709694 0.1775083
## 129   129 7.412913 0.3033294 5.966835 0.2098014 0.02676103 0.1771236
## 130   130 7.412356 0.3034074 5.967006 0.2100789 0.02687121 0.1763779
## 131   131 7.413986 0.3031150 5.967830 0.2108487 0.02664845 0.1766968
## 132   132 7.415892 0.3027787 5.968812 0.2105055 0.02674309 0.1765987
## 133   133 7.415875 0.3027845 5.968950 0.2106020 0.02671760 0.1768514
## 134   134 7.416600 0.3026666 5.969016 0.2107339 0.02652534 0.1771178
## 135   135 7.414588 0.3030412 5.967941 0.2115901 0.02676636 0.1785735
## 136   136 7.413532 0.3032211 5.968671 0.2115132 0.02696488 0.1775606
## 137   137 7.414257 0.3030908 5.969049 0.2106160 0.02703858 0.1771218
## 138   138 7.414185 0.3031079 5.969160 0.2110354 0.02708041 0.1764766
## 139   139 7.414375 0.3030934 5.969777 0.2114415 0.02731212 0.1762264
## 140   140 7.415883 0.3028347 5.970387 0.2120197 0.02744737 0.1761245
## 141   141 7.415247 0.3029558 5.969675 0.2129904 0.02755244 0.1766925
## 142   142 7.414854 0.3030408 5.970198 0.2138317 0.02766042 0.1768581
## 143   143 7.414596 0.3030859 5.969214 0.2143585 0.02766778 0.1769563
## 144   144 7.415284 0.3029784 5.969433 0.2147940 0.02779826 0.1775415
## 145   145 7.416192 0.3028409 5.969601 0.2141714 0.02781593 0.1760611
## 146   146 7.415684 0.3029599 5.968872 0.2143677 0.02801968 0.1759702
## 147   147 7.416506 0.3028249 5.970372 0.2128926 0.02778689 0.1746095
## 148   148 7.416839 0.3027766 5.970565 0.2135663 0.02788672 0.1747016
## 149   149 7.417129 0.3027072 5.969998 0.2138064 0.02771902 0.1742960
## 150   150 7.417139 0.3027246 5.969840 0.2147954 0.02788201 0.1745063
## 151   151 7.417286 0.3026846 5.970083 0.2156452 0.02769735 0.1758872
## 152   152 7.415519 0.3029565 5.968202 0.2158527 0.02763417 0.1760202
## 153   153 7.415671 0.3029133 5.967830 0.2156725 0.02740671 0.1758913
## 154   154 7.414781 0.3030922 5.966678 0.2149692 0.02738550 0.1764949
## 155   155 7.414305 0.3031655 5.966573 0.2146012 0.02737645 0.1765079
## 156   156 7.415162 0.3030179 5.967328 0.2153340 0.02773850 0.1766971
## 157   157 7.414924 0.3030625 5.966909 0.2154946 0.02775551 0.1762245
## 158   158 7.414307 0.3031599 5.966169 0.2156686 0.02766659 0.1770563
## 159   159 7.413602 0.3032965 5.965027 0.2157639 0.02755828 0.1778801
## 160   160 7.413002 0.3033842 5.964622 0.2157836 0.02755892 0.1780251
## 161   161 7.413589 0.3032919 5.965265 0.2163055 0.02763442 0.1782791
## 162   162 7.413590 0.3033016 5.965201 0.2166879 0.02777473 0.1784462
## 163   163 7.413327 0.3033498 5.964168 0.2168921 0.02784541 0.1786119
## 164   164 7.413504 0.3033013 5.964291 0.2172905 0.02762483 0.1781216
## 165   165 7.414293 0.3031739 5.965298 0.2168294 0.02766228 0.1780448
## 166   166 7.414557 0.3031343 5.965569 0.2175904 0.02775185 0.1796327
## 167   167 7.414661 0.3031076 5.966137 0.2174181 0.02751831 0.1790760
## 168   168 7.414764 0.3030891 5.966341 0.2186118 0.02760781 0.1796352
## 169   169 7.415814 0.3028983 5.967859 0.2178832 0.02760039 0.1792185
## 170   170 7.416008 0.3028598 5.968339 0.2173508 0.02766581 0.1802134
## 171   171 7.415769 0.3029057 5.967916 0.2167679 0.02756636 0.1795193
## 172   172 7.415346 0.3029761 5.967500 0.2168379 0.02764437 0.1800370
## 173   173 7.416056 0.3028672 5.968200 0.2165926 0.02772762 0.1795487
## 174   174 7.415402 0.3029810 5.968247 0.2174943 0.02787701 0.1797604
## 175   175 7.415579 0.3029576 5.967715 0.2168048 0.02781539 0.1786033
## 176   176 7.416271 0.3028409 5.968698 0.2169880 0.02775260 0.1785090
## 177   177 7.416319 0.3028429 5.968807 0.2165875 0.02772078 0.1783284
## 178   178 7.416171 0.3028610 5.968916 0.2159349 0.02760185 0.1779163
## 179   179 7.415668 0.3029473 5.968445 0.2158862 0.02768980 0.1786609
## 180   180 7.415925 0.3028992 5.968837 0.2166471 0.02774256 0.1793012
## 181   181 7.416515 0.3028006 5.968881 0.2170585 0.02768240 0.1794487
## 182   182 7.416189 0.3028667 5.967741 0.2179528 0.02776289 0.1808077
## 183   183 7.416580 0.3027977 5.968288 0.2174232 0.02779762 0.1803626
## 184   184 7.415796 0.3029317 5.967791 0.2176702 0.02788276 0.1805349
## 185   185 7.415608 0.3029535 5.968137 0.2170115 0.02772959 0.1802988
## 186   186 7.415456 0.3029721 5.967893 0.2172454 0.02761370 0.1808385
## 187   187 7.415751 0.3029328 5.967937 0.2167869 0.02763080 0.1806097
## 188   188 7.416090 0.3028687 5.968402 0.2165317 0.02761743 0.1805954
## 189   189 7.417074 0.3026970 5.968811 0.2167929 0.02759537 0.1809721
## 190   190 7.416832 0.3027310 5.968580 0.2175441 0.02764944 0.1821253
## 191   191 7.416817 0.3027345 5.968617 0.2170064 0.02755182 0.1820885
## 192   192 7.416214 0.3028303 5.968293 0.2172820 0.02754924 0.1826741
## 193   193 7.416653 0.3027508 5.968528 0.2172555 0.02749603 0.1825592
## 194   194 7.416345 0.3028091 5.968158 0.2171964 0.02755049 0.1824454
## 195   195 7.416932 0.3027111 5.968609 0.2166583 0.02740175 0.1819636
## 196   196 7.416344 0.3028164 5.968355 0.2171603 0.02752923 0.1822261
## 197   197 7.416698 0.3027541 5.968431 0.2174134 0.02757691 0.1825046
## 198   198 7.416570 0.3027722 5.968147 0.2168813 0.02753655 0.1820420
## 199   199 7.415706 0.3029202 5.967203 0.2170721 0.02746714 0.1825931
## 200   200 7.415951 0.3028819 5.967368 0.2171688 0.02735194 0.1828788
## 201   201 7.416531 0.3027881 5.967658 0.2179491 0.02748512 0.1833908
## 202   202 7.416095 0.3028639 5.967286 0.2179011 0.02745219 0.1830481
## 203   203 7.416141 0.3028492 5.967021 0.2178102 0.02739503 0.1831308
## 204   204 7.416053 0.3028576 5.967011 0.2182767 0.02737663 0.1833745
## 205   205 7.416332 0.3028019 5.967617 0.2183151 0.02735860 0.1834144
## 206   206 7.416762 0.3027277 5.968254 0.2187577 0.02742260 0.1836437
## 207   207 7.416392 0.3027927 5.967901 0.2189979 0.02741995 0.1837454
## 208   208 7.416727 0.3027275 5.968298 0.2188550 0.02734816 0.1835867
## 209   209 7.416701 0.3027377 5.968182 0.2191144 0.02736667 0.1836838
## 210   210 7.416836 0.3027163 5.968273 0.2189383 0.02732542 0.1833184
## 211   211 7.416529 0.3027658 5.968172 0.2187148 0.02728541 0.1831374
## 212   212 7.416779 0.3027213 5.968390 0.2186760 0.02724854 0.1829606
## 213   213 7.416606 0.3027537 5.968267 0.2187645 0.02730740 0.1829852
## 214   214 7.416590 0.3027602 5.968294 0.2191432 0.02736031 0.1832778
## 215   215 7.416298 0.3028134 5.968181 0.2190197 0.02741053 0.1832419
## 216   216 7.416520 0.3027747 5.968301 0.2188351 0.02735967 0.1833435
## 217   217 7.416166 0.3028375 5.968059 0.2186771 0.02735722 0.1830073
## 218   218 7.416420 0.3027947 5.967919 0.2187816 0.02739440 0.1829818
## 219   219 7.416151 0.3028383 5.967665 0.2183855 0.02730469 0.1828074
## 220   220 7.416265 0.3028156 5.967812 0.2182717 0.02731807 0.1824960
## 221   221 7.416118 0.3028430 5.967659 0.2181610 0.02732343 0.1822512
## 222   222 7.416195 0.3028293 5.967717 0.2180447 0.02731438 0.1820859
## 223   223 7.416345 0.3028024 5.967802 0.2181462 0.02737091 0.1821911
## 224   224 7.416384 0.3027934 5.967811 0.2181387 0.02735555 0.1821795
## 225   225 7.416390 0.3027907 5.967778 0.2180434 0.02731848 0.1820977
## 226   226 7.416468 0.3027766 5.967844 0.2179505 0.02729115 0.1819547
## 227   227 7.416482 0.3027754 5.967785 0.2179532 0.02727520 0.1820210
## 228   228 7.416638 0.3027505 5.967947 0.2180478 0.02726849 0.1820805
## 229   229 7.416303 0.3028058 5.967631 0.2181335 0.02728514 0.1821199
## 230   230 7.416250 0.3028145 5.967632 0.2180098 0.02726442 0.1819889
## 231   231 7.416267 0.3028119 5.967682 0.2178394 0.02723258 0.1817953
## 232   232 7.416231 0.3028211 5.967587 0.2177783 0.02723500 0.1816916
## 233   233 7.416227 0.3028214 5.967588 0.2176996 0.02723687 0.1816360
## 234   234 7.416163 0.3028312 5.967460 0.2176465 0.02723778 0.1816568
## 235   235 7.416083 0.3028448 5.967348 0.2176762 0.02724708 0.1817396
## 236   236 7.416058 0.3028499 5.967376 0.2176759 0.02726072 0.1816618
## 237   237 7.416085 0.3028466 5.967409 0.2176160 0.02726014 0.1816138
## 238   238 7.416116 0.3028413 5.967417 0.2176928 0.02727097 0.1816849
## 239   239 7.416087 0.3028466 5.967375 0.2176606 0.02726567 0.1816421
## 240   240 7.416075 0.3028485 5.967363 0.2176692 0.02726589 0.1816577
##    nvmax
## 12    12
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.645541e+01 -1.710005e-02  3.277989e+00  1.514512e-01  9.774137e-01 
##           x10           x11           x16           x17        stat14 
##  4.530726e-01  7.856504e+07  2.800699e-01  4.037473e-01 -2.873338e-01 
##        stat98       stat110      sqrt.x18 
##  9.434815e-01 -8.644369e-01  7.264493e+00

Test

# Evaluate the CV-selected forward-selection model on the held-out test set,
# reusing the resample id from training so predictions line up with the folds.
if (algo.forward.caret) {
  test.model(
    model.forward, data.test,
    method = "leapForward", subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   107.0   120.4   123.8   123.9   127.4   139.2 
## [1] "leapForward  Test MSE: 95.3468824372506"

Backward Elimination

Train

# Fit a backward-elimination model with step() starting from the full OLS fit,
# report the elapsed wall-clock time, and draw residual diagnostics.
if (algo.backward) {
  # NOTE: this stepwise search is slow on this feature set.
  start.time <- Sys.time()

  model.backward <- step(model.full, data = data.train,
                         direction = "backward", trace = 0)
  print(summary(model.backward))
  # saveRDS(model.backward, file = "model_backward.rds")

  end.time <- Sys.time()
  print(paste0("Time taken for Backward Elimination: ", end.time - start.time))

  plot.diagnostics(model.backward, data.train)
}

Test

# Evaluate the backward-elimination model on the held-out test set.
if (algo.backward == TRUE){
  # FIX: was `model.backard` (typo) — that object is never created, so this
  # chunk raised "object 'model.backard' not found" whenever algo.backward
  # was TRUE. The model fitted in the training chunk is `model.backward`.
  test.model(model.backward, data.test, "Backward Elimination")
}

Backward Elimination with CV (w/ full train)

Train

# Backward elimination tuned by cross-validation (caret, full training set).
# Keeps the fitted model and the resample id for the downstream test chunk.
if (algo.backward.caret) {
  set.seed(1)  # reproducible CV folds
  fit <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapBackward",
    feature.names = feature.names
  )
  model.backward <- fit$model
  id <- fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 9 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.190312 0.1031351 7.782556 0.2986960 0.01510985 0.1630465
## 2       2  9.959863 0.1431490 7.586895 0.3329232 0.01527567 0.1507478
## 3       3  9.800520 0.1706239 7.425932 0.3371193 0.02089715 0.1510136
## 4       4  9.664292 0.1935352 7.225063 0.3244736 0.02143874 0.1193851
## 5       5  9.581672 0.2073775 7.156896 0.3424689 0.02081535 0.1340120
## 6       6  9.555257 0.2117661 7.137916 0.3542029 0.02335306 0.1291911
## 7       7  9.559823 0.2111601 7.139880 0.3644482 0.02461551 0.1382767
## 8       8  9.547579 0.2133689 7.128788 0.3651827 0.02538635 0.1329768
## 9       9  9.521929 0.2176251 7.121917 0.3669530 0.02593030 0.1336222
## 10     10  9.527194 0.2165947 7.122196 0.3669707 0.02380088 0.1352945
## 11     11  9.533718 0.2154997 7.126706 0.3659198 0.02332300 0.1364556
## 12     12  9.541785 0.2142511 7.134061 0.3701140 0.02326252 0.1373844
## 13     13  9.534556 0.2154392 7.126870 0.3663178 0.02296114 0.1288239
## 14     14  9.536855 0.2150780 7.132556 0.3624797 0.02219343 0.1294582
## 15     15  9.544337 0.2139315 7.140683 0.3659221 0.02288397 0.1329420
## 16     16  9.545968 0.2136472 7.143982 0.3630897 0.02314586 0.1312100
## 17     17  9.544270 0.2139968 7.142018 0.3599857 0.02343083 0.1326030
## 18     18  9.543897 0.2140092 7.134101 0.3539441 0.02290571 0.1254990
## 19     19  9.552811 0.2126848 7.139211 0.3624077 0.02300716 0.1298498
## 20     20  9.553320 0.2126449 7.137766 0.3731946 0.02387889 0.1382055
## 21     21  9.559763 0.2116480 7.137769 0.3673744 0.02303394 0.1388360
## 22     22  9.564709 0.2108593 7.141372 0.3617888 0.02269872 0.1363974
## 23     23  9.571290 0.2098580 7.146539 0.3586441 0.02288093 0.1340659
## 24     24  9.570587 0.2099931 7.147502 0.3523013 0.02220142 0.1275588
## 25     25  9.573535 0.2096418 7.149981 0.3607139 0.02309019 0.1330336
## 26     26  9.580184 0.2086789 7.151667 0.3600726 0.02309464 0.1318660
## 27     27  9.582890 0.2082912 7.154069 0.3577557 0.02267486 0.1227621
## 28     28  9.588842 0.2074296 7.158234 0.3571512 0.02216884 0.1227508
## 29     29  9.584958 0.2081113 7.158470 0.3584721 0.02175998 0.1327570
## 30     30  9.586310 0.2079786 7.157073 0.3555134 0.02161703 0.1277553
## 31     31  9.586259 0.2080732 7.156119 0.3578632 0.02219275 0.1287727
## 32     32  9.591922 0.2072105 7.160248 0.3560463 0.02219070 0.1282975
## 33     33  9.595646 0.2066643 7.163133 0.3605345 0.02280922 0.1291767
## 34     34  9.597433 0.2064269 7.160869 0.3608354 0.02310251 0.1309469
## 35     35  9.596532 0.2066153 7.161180 0.3644106 0.02316878 0.1374475
## 36     36  9.600573 0.2060610 7.163776 0.3598456 0.02288715 0.1344629
## 37     37  9.602651 0.2057358 7.166564 0.3640418 0.02295630 0.1349412
## 38     38  9.604512 0.2054963 7.167453 0.3635753 0.02310241 0.1317210
## 39     39  9.608805 0.2048704 7.171345 0.3625596 0.02377330 0.1300607
## 40     40  9.612821 0.2042201 7.176365 0.3650424 0.02393346 0.1307109
## 41     41  9.615674 0.2038665 7.176778 0.3657859 0.02408658 0.1284858
## 42     42  9.620092 0.2031618 7.179780 0.3649969 0.02407763 0.1271363
## 43     43  9.617554 0.2035821 7.180054 0.3641159 0.02364274 0.1284856
## 44     44  9.622203 0.2029127 7.182531 0.3655881 0.02392387 0.1281852
## 45     45  9.620965 0.2031279 7.180809 0.3668510 0.02454173 0.1307345
## 46     46  9.627326 0.2021685 7.186062 0.3665007 0.02438629 0.1299451
## 47     47  9.626831 0.2023128 7.183640 0.3633504 0.02415461 0.1296926
## 48     48  9.629449 0.2020036 7.185661 0.3681793 0.02508921 0.1333933
## 49     49  9.632236 0.2015898 7.188730 0.3661710 0.02475550 0.1364361
## 50     50  9.634364 0.2012997 7.189078 0.3690645 0.02449571 0.1382128
## 51     51  9.636333 0.2010273 7.190305 0.3684947 0.02464871 0.1385368
## 52     52  9.638118 0.2007850 7.189475 0.3715292 0.02490093 0.1401726
## 53     53  9.635758 0.2011847 7.189408 0.3754393 0.02480993 0.1430953
## 54     54  9.641651 0.2003322 7.195028 0.3747249 0.02478665 0.1447869
## 55     55  9.645953 0.1997105 7.198867 0.3695552 0.02422177 0.1425531
## 56     56  9.643378 0.2001302 7.195854 0.3724393 0.02447098 0.1451748
## 57     57  9.642504 0.2003344 7.194781 0.3746885 0.02489110 0.1474726
## 58     58  9.642993 0.2003523 7.193771 0.3766599 0.02540686 0.1477145
## 59     59  9.643843 0.2002711 7.193387 0.3777781 0.02565320 0.1447340
## 60     60  9.644867 0.2000700 7.194525 0.3760730 0.02538877 0.1462398
## 61     61  9.645274 0.2000542 7.193568 0.3780859 0.02584991 0.1484062
## 62     62  9.645656 0.2000454 7.195392 0.3811767 0.02652045 0.1548391
## 63     63  9.646682 0.1999629 7.195098 0.3852664 0.02680071 0.1555922
## 64     64  9.647912 0.1997981 7.199289 0.3818879 0.02655091 0.1503756
## 65     65  9.650644 0.1994318 7.201359 0.3820413 0.02652290 0.1492427
## 66     66  9.655988 0.1986667 7.204099 0.3824136 0.02658527 0.1485457
## 67     67  9.654643 0.1988875 7.201428 0.3830769 0.02669010 0.1466867
## 68     68  9.659585 0.1981830 7.206828 0.3838052 0.02702685 0.1479042
## 69     69  9.658789 0.1982725 7.207418 0.3837510 0.02695470 0.1471189
## 70     70  9.659897 0.1981735 7.207662 0.3837650 0.02754856 0.1473266
## 71     71  9.656680 0.1986461 7.206912 0.3840162 0.02768464 0.1493677
## 72     72  9.656843 0.1986314 7.207565 0.3775827 0.02757698 0.1427700
## 73     73  9.655694 0.1988422 7.207053 0.3797981 0.02793264 0.1451658
## 74     74  9.656107 0.1988522 7.207699 0.3817656 0.02813751 0.1476252
## 75     75  9.657427 0.1986741 7.209907 0.3774182 0.02767003 0.1453632
## 76     76  9.659277 0.1983849 7.211745 0.3776379 0.02774349 0.1456085
## 77     77  9.658539 0.1985057 7.211854 0.3791945 0.02759815 0.1473959
## 78     78  9.658459 0.1985631 7.211649 0.3795358 0.02737591 0.1493507
## 79     79  9.657268 0.1987766 7.211088 0.3807275 0.02735084 0.1518364
## 80     80  9.657817 0.1986751 7.212067 0.3780166 0.02737630 0.1505015
## 81     81  9.657363 0.1987921 7.212589 0.3761064 0.02726897 0.1477484
## 82     82  9.658199 0.1986666 7.212784 0.3745537 0.02725415 0.1469726
## 83     83  9.657736 0.1987820 7.211611 0.3740285 0.02680278 0.1461157
## 84     84  9.659667 0.1984927 7.213057 0.3729812 0.02721824 0.1445052
## 85     85  9.658780 0.1986216 7.214097 0.3739252 0.02725570 0.1474515
## 86     86  9.660384 0.1984031 7.214306 0.3702067 0.02701949 0.1443707
## 87     87  9.661047 0.1982949 7.214634 0.3679313 0.02695587 0.1429859
## 88     88  9.663019 0.1980076 7.217696 0.3693278 0.02710663 0.1445204
## 89     89  9.662051 0.1981779 7.218438 0.3715162 0.02717557 0.1469989
## 90     90  9.663033 0.1980434 7.219780 0.3717267 0.02688421 0.1462612
## 91     91  9.665359 0.1977369 7.223641 0.3703914 0.02669306 0.1445558
## 92     92  9.667422 0.1974664 7.225569 0.3723655 0.02661574 0.1442257
## 93     93  9.666028 0.1976662 7.225822 0.3714114 0.02634170 0.1459799
## 94     94  9.667722 0.1974280 7.226816 0.3709153 0.02616809 0.1439857
## 95     95  9.668599 0.1973385 7.226451 0.3713760 0.02655574 0.1449040
## 96     96  9.669764 0.1971784 7.228232 0.3725758 0.02624939 0.1471385
## 97     97  9.670348 0.1971195 7.228822 0.3714152 0.02631358 0.1457294
## 98     98  9.669941 0.1972003 7.228287 0.3713677 0.02616025 0.1474314
## 99     99  9.670825 0.1970529 7.229588 0.3697290 0.02604370 0.1440365
## 100   100  9.667626 0.1975169 7.226049 0.3676960 0.02616840 0.1427002
## 101   101  9.666125 0.1977483 7.224694 0.3672140 0.02606080 0.1448217
## 102   102  9.666471 0.1977042 7.225435 0.3675243 0.02609824 0.1445758
## 103   103  9.668916 0.1973983 7.225567 0.3699582 0.02647499 0.1467489
## 104   104  9.669900 0.1972395 7.227153 0.3700397 0.02632758 0.1465122
## 105   105  9.671843 0.1969696 7.228753 0.3721074 0.02620231 0.1453505
## 106   106  9.673717 0.1967033 7.230173 0.3729474 0.02594625 0.1461902
## 107   107  9.674119 0.1966524 7.229660 0.3730648 0.02581951 0.1451711
## 108   108  9.673164 0.1967769 7.229619 0.3732096 0.02594298 0.1440763
## 109   109  9.672967 0.1968470 7.229417 0.3720702 0.02573446 0.1423604
## 110   110  9.672678 0.1968985 7.228983 0.3722670 0.02608570 0.1431917
## 111   111  9.675734 0.1964287 7.231850 0.3713177 0.02582189 0.1428699
## 112   112  9.675370 0.1965201 7.230888 0.3727961 0.02605881 0.1440506
## 113   113  9.677381 0.1962326 7.231280 0.3717835 0.02621544 0.1438596
## 114   114  9.678637 0.1960670 7.231898 0.3701629 0.02608369 0.1433483
## 115   115  9.677621 0.1962796 7.232310 0.3715995 0.02608322 0.1437189
## 116   116  9.678766 0.1961147 7.232171 0.3723030 0.02597248 0.1450088
## 117   117  9.679749 0.1959643 7.232437 0.3713013 0.02557724 0.1448885
## 118   118  9.682489 0.1955844 7.233911 0.3732240 0.02557676 0.1457529
## 119   119  9.680828 0.1958399 7.232499 0.3744536 0.02556100 0.1469209
## 120   120  9.678624 0.1962101 7.230625 0.3734304 0.02555591 0.1463556
## 121   121  9.679156 0.1961206 7.232026 0.3723903 0.02555874 0.1441594
## 122   122  9.678398 0.1962121 7.230840 0.3727551 0.02568723 0.1444858
## 123   123  9.679154 0.1961491 7.231018 0.3740559 0.02589174 0.1452397
## 124   124  9.680818 0.1958835 7.233499 0.3742318 0.02597716 0.1466699
## 125   125  9.681416 0.1957975 7.233911 0.3753578 0.02619934 0.1477709
## 126   126  9.682177 0.1957067 7.235046 0.3770685 0.02631372 0.1491907
## 127   127  9.680928 0.1958879 7.233689 0.3765737 0.02609177 0.1477921
## 128   128  9.680511 0.1959813 7.233824 0.3790448 0.02620182 0.1492414
## 129   129  9.681080 0.1958897 7.234039 0.3767495 0.02574496 0.1475254
## 130   130  9.681393 0.1958525 7.235131 0.3782673 0.02625213 0.1490189
## 131   131  9.682113 0.1957394 7.236471 0.3776237 0.02613940 0.1490901
## 132   132  9.683260 0.1955630 7.236452 0.3771310 0.02561491 0.1473387
## 133   133  9.683002 0.1956105 7.237153 0.3765145 0.02541078 0.1458407
## 134   134  9.682651 0.1956580 7.236623 0.3762711 0.02527905 0.1465806
## 135   135  9.682170 0.1957515 7.235653 0.3781639 0.02554052 0.1476960
## 136   136  9.682842 0.1956807 7.235370 0.3794354 0.02596492 0.1485560
## 137   137  9.682404 0.1957296 7.235497 0.3772180 0.02559233 0.1473255
## 138   138  9.681885 0.1958255 7.235850 0.3763109 0.02564181 0.1479026
## 139   139  9.679927 0.1961490 7.233167 0.3768134 0.02569605 0.1480632
## 140   140  9.680960 0.1959955 7.233873 0.3748578 0.02569700 0.1476651
## 141   141  9.682615 0.1957576 7.234775 0.3737172 0.02577119 0.1455488
## 142   142  9.682938 0.1957170 7.235089 0.3745579 0.02602726 0.1445949
## 143   143  9.682582 0.1958027 7.236104 0.3754934 0.02600018 0.1449689
## 144   144  9.681572 0.1959289 7.236007 0.3755640 0.02589335 0.1465314
## 145   145  9.680514 0.1960791 7.235003 0.3758091 0.02589886 0.1470943
## 146   146  9.680358 0.1961017 7.234932 0.3751057 0.02572356 0.1472463
## 147   147  9.679690 0.1962049 7.235613 0.3735680 0.02533014 0.1477029
## 148   148  9.680431 0.1961091 7.235340 0.3749759 0.02540462 0.1484899
## 149   149  9.681036 0.1960445 7.236321 0.3755377 0.02562053 0.1482802
## 150   150  9.679581 0.1962732 7.235592 0.3765216 0.02564565 0.1489431
## 151   151  9.679472 0.1962758 7.236757 0.3772506 0.02570654 0.1498694
## 152   152  9.680707 0.1961036 7.237153 0.3775436 0.02583079 0.1508709
## 153   153  9.680566 0.1961056 7.237479 0.3775819 0.02567087 0.1515347
## 154   154  9.681711 0.1959528 7.237324 0.3787591 0.02581086 0.1523860
## 155   155  9.682136 0.1959192 7.237958 0.3782622 0.02579551 0.1537416
## 156   156  9.681302 0.1960451 7.237513 0.3780378 0.02582054 0.1521891
## 157   157  9.680701 0.1961322 7.236825 0.3777809 0.02575759 0.1524903
## 158   158  9.681590 0.1960019 7.237556 0.3786909 0.02590042 0.1540943
## 159   159  9.682255 0.1959106 7.237112 0.3801772 0.02593384 0.1556453
## 160   160  9.683148 0.1958033 7.238510 0.3805959 0.02615575 0.1568874
## 161   161  9.683460 0.1957487 7.238641 0.3816679 0.02616930 0.1577370
## 162   162  9.684812 0.1955765 7.240006 0.3828818 0.02615581 0.1588349
## 163   163  9.685683 0.1954553 7.241075 0.3835937 0.02619312 0.1590058
## 164   164  9.685889 0.1954211 7.241201 0.3832258 0.02626943 0.1589375
## 165   165  9.686601 0.1953420 7.241714 0.3850308 0.02648571 0.1613849
## 166   166  9.686651 0.1953351 7.241133 0.3859684 0.02651288 0.1619167
## 167   167  9.687549 0.1951947 7.242196 0.3856697 0.02640606 0.1620496
## 168   168  9.687862 0.1951351 7.242776 0.3851438 0.02649108 0.1617260
## 169   169  9.687078 0.1952596 7.242026 0.3853192 0.02649081 0.1621757
## 170   170  9.687410 0.1952328 7.242428 0.3862508 0.02655982 0.1635262
## 171   171  9.688005 0.1951359 7.242679 0.3868254 0.02652633 0.1637290
## 172   172  9.687434 0.1952270 7.242334 0.3866840 0.02655763 0.1632818
## 173   173  9.687423 0.1952281 7.243023 0.3855069 0.02637515 0.1627001
## 174   174  9.687112 0.1952805 7.242960 0.3848697 0.02624733 0.1609496
## 175   175  9.686799 0.1953232 7.242719 0.3858285 0.02646321 0.1611256
## 176   176  9.686660 0.1953527 7.243230 0.3848990 0.02638029 0.1616791
## 177   177  9.687150 0.1952842 7.243371 0.3856971 0.02642053 0.1621661
## 178   178  9.686122 0.1954324 7.243487 0.3862468 0.02647432 0.1626604
## 179   179  9.686228 0.1954183 7.242996 0.3863581 0.02650800 0.1629576
## 180   180  9.686424 0.1953998 7.243285 0.3866935 0.02652466 0.1631703
## 181   181  9.686562 0.1953837 7.243792 0.3874945 0.02657704 0.1627951
## 182   182  9.686730 0.1953661 7.244403 0.3874871 0.02656342 0.1622812
## 183   183  9.686650 0.1953697 7.244044 0.3883764 0.02659349 0.1631177
## 184   184  9.686962 0.1953259 7.244266 0.3890472 0.02666132 0.1646269
## 185   185  9.687991 0.1951677 7.244157 0.3887653 0.02651650 0.1639586
## 186   186  9.688035 0.1951550 7.244154 0.3881145 0.02656480 0.1634031
## 187   187  9.687536 0.1952358 7.243995 0.3882394 0.02661836 0.1635012
## 188   188  9.687498 0.1952262 7.243998 0.3880765 0.02662024 0.1627912
## 189   189  9.687754 0.1951914 7.244335 0.3891155 0.02663668 0.1626310
## 190   190  9.687797 0.1951946 7.244848 0.3889014 0.02667953 0.1630963
## 191   191  9.688178 0.1951319 7.244664 0.3883801 0.02660549 0.1627665
## 192   192  9.688433 0.1950903 7.244603 0.3891249 0.02652796 0.1629784
## 193   193  9.688380 0.1951001 7.245135 0.3888890 0.02643293 0.1619106
## 194   194  9.688121 0.1951217 7.245088 0.3881569 0.02636925 0.1614809
## 195   195  9.688222 0.1951060 7.244962 0.3880575 0.02635469 0.1613041
## 196   196  9.688887 0.1950042 7.245640 0.3872290 0.02627004 0.1603557
## 197   197  9.688680 0.1950349 7.245198 0.3877506 0.02632438 0.1608187
## 198   198  9.688401 0.1950764 7.245310 0.3885380 0.02644858 0.1616780
## 199   199  9.688461 0.1950461 7.245544 0.3877857 0.02633977 0.1614304
## 200   200  9.688306 0.1950794 7.245339 0.3881484 0.02640337 0.1617859
## 201   201  9.687820 0.1951452 7.245034 0.3882991 0.02640802 0.1614124
## 202   202  9.687374 0.1952097 7.244802 0.3880539 0.02636858 0.1612538
## 203   203  9.686905 0.1952841 7.244417 0.3883018 0.02627585 0.1612692
## 204   204  9.686634 0.1953229 7.244277 0.3884559 0.02620654 0.1618480
## 205   205  9.687269 0.1952278 7.245158 0.3884057 0.02627348 0.1618148
## 206   206  9.688149 0.1950945 7.245568 0.3887623 0.02624221 0.1620454
## 207   207  9.688335 0.1950688 7.245606 0.3889540 0.02625486 0.1621014
## 208   208  9.688607 0.1950285 7.245797 0.3882448 0.02615836 0.1612334
## 209   209  9.689119 0.1949577 7.246420 0.3884732 0.02616674 0.1614659
## 210   210  9.689410 0.1949172 7.246674 0.3884985 0.02618403 0.1619724
## 211   211  9.689874 0.1948647 7.247263 0.3888610 0.02628413 0.1620669
## 212   212  9.689386 0.1949426 7.246849 0.3892884 0.02634964 0.1623896
## 213   213  9.689538 0.1949256 7.247005 0.3894320 0.02637767 0.1625640
## 214   214  9.689415 0.1949425 7.246903 0.3896395 0.02636160 0.1627810
## 215   215  9.689336 0.1949556 7.246885 0.3896715 0.02638907 0.1630845
## 216   216  9.689278 0.1949713 7.246751 0.3899865 0.02644316 0.1631724
## 217   217  9.689429 0.1949494 7.246887 0.3898691 0.02639327 0.1629683
## 218   218  9.689475 0.1949462 7.246845 0.3902329 0.02639651 0.1633584
## 219   219  9.689426 0.1949546 7.246782 0.3902348 0.02637394 0.1632648
## 220   220  9.689599 0.1949281 7.246868 0.3903022 0.02639606 0.1634259
## 221   221  9.689562 0.1949375 7.246853 0.3906231 0.02643759 0.1637050
## 222   222  9.689349 0.1949743 7.246496 0.3907741 0.02647679 0.1639209
## 223   223  9.689571 0.1949439 7.246763 0.3908481 0.02649005 0.1639418
## 224   224  9.689345 0.1949736 7.246635 0.3909025 0.02648784 0.1639960
## 225   225  9.689357 0.1949749 7.246645 0.3907742 0.02647186 0.1637769
## 226   226  9.689329 0.1949781 7.246578 0.3907709 0.02646672 0.1637307
## 227   227  9.689298 0.1949824 7.246563 0.3908538 0.02647663 0.1637775
## 228   228  9.689258 0.1949882 7.246508 0.3908131 0.02647922 0.1638110
## 229   229  9.689107 0.1950106 7.246410 0.3908175 0.02649283 0.1637711
## 230   230  9.689072 0.1950141 7.246445 0.3908223 0.02647661 0.1638339
## 231   231  9.689184 0.1949993 7.246531 0.3908957 0.02648865 0.1639251
## 232   232  9.689154 0.1950023 7.246441 0.3908435 0.02647868 0.1638115
## 233   233  9.689027 0.1950207 7.246308 0.3908113 0.02647664 0.1636636
## 234   234  9.689106 0.1950121 7.246375 0.3909201 0.02649116 0.1637975
## 235   235  9.689050 0.1950188 7.246326 0.3908231 0.02648090 0.1637609
## 236   236  9.689049 0.1950187 7.246335 0.3908174 0.02648176 0.1637727
## 237   237  9.689064 0.1950160 7.246321 0.3908130 0.02648343 0.1636916
## 238   238  9.689039 0.1950197 7.246260 0.3907944 0.02648188 0.1636317
## 239   239  9.689024 0.1950218 7.246221 0.3907945 0.02647965 0.1636074
## 240   240  9.689033 0.1950205 7.246237 0.3907893 0.02647943 0.1636052
##   nvmax
## 9     9
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

## (Intercept)          x4          x7          x9         x10         x16 
##  96.8126929  -0.0150172   3.1893069   0.9833780   0.3546913   0.2736344 
##         x17      stat98     stat110    sqrt.x18 
##   0.4098622   1.0203902  -0.9352919   7.5838139

Test

# Evaluate the CV-selected backward-elimination model on the held-out test
# set, reusing the resample id produced during training.
if (algo.backward.caret) {
  test.model(
    model.backward, data.test,
    method = "leapBackward", subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.0   121.5   125.1   125.2   128.9   139.6 
## [1] "leapBackward  Test MSE: 93.5015990422204"

Backward Elimination with CV (w/ filtered train)

Train

# Backward elimination tuned by cross-validation (caret) on the *filtered*
# training set (data.train2). Overwrites model.backward and id from the
# full-train run above.
if (algo.backward.caret) {
  set.seed(1)  # reproducible CV folds
  fit <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "leapBackward",
    feature.names = feature.names
  )
  model.backward <- fit$model
  id <- fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 12 on full training set
##     nvmax     RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 8.210706 0.1435527 6.621521 0.2159950 0.04104185 0.1629293
## 2       2 7.907521 0.2044789 6.401711 0.1810466 0.03408236 0.1252524
## 3       3 7.730850 0.2397063 6.231490 0.1702484 0.03156176 0.1466516
## 4       4 7.579309 0.2686629 6.047799 0.1662627 0.02641710 0.1393506
## 5       5 7.472423 0.2890728 5.963578 0.1893338 0.02674589 0.1533025
## 6       6 7.429208 0.2974001 5.937806 0.1973226 0.03031935 0.1632963
## 7       7 7.421047 0.2990612 5.944857 0.2024926 0.03113068 0.1582367
## 8       8 7.405777 0.3017923 5.945689 0.2200323 0.02788967 0.1637757
## 9       9 7.381234 0.3064082 5.925445 0.2139996 0.02569411 0.1632100
## 10     10 7.355087 0.3112072 5.905256 0.2165269 0.02544152 0.1609382
## 11     11 7.347002 0.3127548 5.898460 0.2124384 0.02549727 0.1574845
## 12     12 7.323722 0.3170669 5.885196 0.2180591 0.02639073 0.1655038
## 13     13 7.335574 0.3148895 5.891887 0.2195827 0.02640011 0.1674149
## 14     14 7.341656 0.3138854 5.897824 0.2141621 0.02822196 0.1626993
## 15     15 7.347532 0.3128296 5.902531 0.2179237 0.02839216 0.1666271
## 16     16 7.345003 0.3132607 5.896398 0.2127767 0.02827226 0.1642509
## 17     17 7.344569 0.3133148 5.899189 0.2147645 0.02818473 0.1643354
## 18     18 7.343376 0.3134977 5.896178 0.2094949 0.02732696 0.1614053
## 19     19 7.342787 0.3136077 5.897887 0.2070675 0.02667938 0.1597983
## 20     20 7.343742 0.3134637 5.900087 0.2061562 0.02728069 0.1586573
## 21     21 7.349809 0.3124226 5.905339 0.2068412 0.02796785 0.1555892
## 22     22 7.353301 0.3117828 5.910233 0.2077222 0.02796162 0.1567299
## 23     23 7.350183 0.3123431 5.905662 0.2090116 0.02748881 0.1630853
## 24     24 7.346425 0.3130176 5.902851 0.2014126 0.02692238 0.1574742
## 25     25 7.347367 0.3129223 5.908193 0.1978110 0.02673383 0.1564076
## 26     26 7.343130 0.3137302 5.906502 0.2053065 0.02630365 0.1645743
## 27     27 7.340262 0.3142821 5.901891 0.2013480 0.02625824 0.1656959
## 28     28 7.338302 0.3146883 5.900277 0.2018190 0.02643311 0.1638495
## 29     29 7.342129 0.3140067 5.905262 0.2071231 0.02657893 0.1648602
## 30     30 7.333349 0.3156058 5.898457 0.2079428 0.02703278 0.1679786
## 31     31 7.339450 0.3144862 5.904868 0.2025331 0.02542195 0.1583461
## 32     32 7.340665 0.3142887 5.907298 0.2034224 0.02629342 0.1578937
## 33     33 7.342626 0.3139577 5.908741 0.2034346 0.02660190 0.1595019
## 34     34 7.339298 0.3145770 5.907794 0.2032254 0.02711750 0.1597803
## 35     35 7.333354 0.3156875 5.901900 0.2014619 0.02702514 0.1555310
## 36     36 7.334790 0.3154572 5.905550 0.2025002 0.02686454 0.1558637
## 37     37 7.337579 0.3150065 5.910045 0.2021846 0.02699130 0.1547611
## 38     38 7.339357 0.3147506 5.911553 0.2060265 0.02854571 0.1566071
## 39     39 7.342065 0.3143174 5.912935 0.2056906 0.02922346 0.1568407
## 40     40 7.342418 0.3142949 5.911616 0.2076779 0.02933165 0.1580016
## 41     41 7.339011 0.3149084 5.911756 0.2106466 0.02937531 0.1594547
## 42     42 7.332894 0.3160983 5.908919 0.2112712 0.02961083 0.1605133
## 43     43 7.331376 0.3164591 5.905935 0.2119348 0.02964066 0.1619631
## 44     44 7.329411 0.3167972 5.903041 0.2187635 0.02988777 0.1653733
## 45     45 7.330833 0.3165931 5.903834 0.2174106 0.03011596 0.1674600
## 46     46 7.334165 0.3160383 5.907026 0.2156465 0.03022396 0.1673065
## 47     47 7.337588 0.3154844 5.908226 0.2175555 0.02962472 0.1690830
## 48     48 7.340146 0.3150414 5.911950 0.2156643 0.02904458 0.1679701
## 49     49 7.340872 0.3148863 5.911222 0.2143267 0.02867156 0.1694076
## 50     50 7.343496 0.3144556 5.912548 0.2137136 0.02816306 0.1697390
## 51     51 7.347816 0.3137562 5.916753 0.2095761 0.02808233 0.1670888
## 52     52 7.345582 0.3141333 5.914371 0.2132303 0.02849560 0.1700247
## 53     53 7.346909 0.3139377 5.912943 0.2140948 0.02883569 0.1688090
## 54     54 7.346819 0.3139882 5.913376 0.2134009 0.02852018 0.1680234
## 55     55 7.346650 0.3140134 5.913329 0.2137320 0.02872047 0.1658874
## 56     56 7.343521 0.3145694 5.911323 0.2152958 0.02936822 0.1698070
## 57     57 7.342988 0.3146939 5.912153 0.2136638 0.02851954 0.1659090
## 58     58 7.348023 0.3138216 5.918427 0.2136191 0.02788197 0.1687250
## 59     59 7.349430 0.3135762 5.919056 0.2180247 0.02809545 0.1731068
## 60     60 7.350089 0.3134791 5.919243 0.2120574 0.02756839 0.1702572
## 61     61 7.350538 0.3134304 5.919421 0.2120689 0.02787809 0.1727728
## 62     62 7.352920 0.3130472 5.923454 0.2152672 0.02847104 0.1764597
## 63     63 7.354574 0.3128080 5.925381 0.2150128 0.02858189 0.1755579
## 64     64 7.354766 0.3127953 5.926239 0.2153854 0.02821589 0.1762139
## 65     65 7.357014 0.3124095 5.928402 0.2137734 0.02809695 0.1725881
## 66     66 7.358954 0.3120727 5.929031 0.2130497 0.02804398 0.1730578
## 67     67 7.360272 0.3118536 5.928302 0.2127026 0.02798154 0.1717442
## 68     68 7.362800 0.3114123 5.930514 0.2120391 0.02806323 0.1713144
## 69     69 7.364837 0.3110822 5.930976 0.2095514 0.02760061 0.1720075
## 70     70 7.368369 0.3104404 5.934648 0.2117241 0.02738723 0.1737884
## 71     71 7.370895 0.3099975 5.937129 0.2133448 0.02749566 0.1727933
## 72     72 7.372642 0.3097038 5.937622 0.2139265 0.02756466 0.1735625
## 73     73 7.374502 0.3094437 5.938890 0.2135857 0.02793861 0.1726770
## 74     74 7.376879 0.3090288 5.941432 0.2124061 0.02808915 0.1718190
## 75     75 7.379328 0.3086354 5.944246 0.2114256 0.02788580 0.1709617
## 76     76 7.381590 0.3083025 5.946640 0.2132497 0.02853662 0.1709647
## 77     77 7.381417 0.3083772 5.946596 0.2130661 0.02855534 0.1721240
## 78     78 7.381958 0.3082782 5.946871 0.2138494 0.02840641 0.1727322
## 79     79 7.381462 0.3084194 5.947110 0.2127159 0.02862531 0.1724024
## 80     80 7.380520 0.3085805 5.945039 0.2118363 0.02860007 0.1691762
## 81     81 7.380891 0.3085374 5.943359 0.2128878 0.02861812 0.1703036
## 82     82 7.381349 0.3084968 5.944909 0.2114056 0.02846851 0.1707193
## 83     83 7.382548 0.3082825 5.946018 0.2130197 0.02817863 0.1712788
## 84     84 7.383072 0.3082064 5.946413 0.2133854 0.02839816 0.1706101
## 85     85 7.387143 0.3075156 5.947810 0.2161496 0.02861655 0.1735255
## 86     86 7.390391 0.3069572 5.950900 0.2164555 0.02869045 0.1734721
## 87     87 7.393622 0.3063875 5.953385 0.2174516 0.02901762 0.1763916
## 88     88 7.396512 0.3059304 5.954959 0.2209848 0.02948080 0.1780013
## 89     89 7.395848 0.3060266 5.954886 0.2217369 0.02932433 0.1765934
## 90     90 7.395900 0.3060473 5.955249 0.2226634 0.02932733 0.1753455
## 91     91 7.396936 0.3059145 5.958740 0.2198843 0.02912557 0.1731676
## 92     92 7.397627 0.3057508 5.958854 0.2204861 0.02902603 0.1764264
## 93     93 7.398681 0.3055463 5.960063 0.2207602 0.02864251 0.1769399
## 94     94 7.399185 0.3054427 5.961256 0.2210416 0.02874394 0.1769982
## 95     95 7.400850 0.3051780 5.961746 0.2202112 0.02861033 0.1750621
## 96     96 7.399872 0.3053624 5.961707 0.2189871 0.02825631 0.1747233
## 97     97 7.400221 0.3053427 5.962089 0.2180487 0.02833258 0.1743527
## 98     98 7.400047 0.3053796 5.962058 0.2180440 0.02832883 0.1744246
## 99     99 7.402219 0.3049921 5.963588 0.2170855 0.02819623 0.1731806
## 100   100 7.401699 0.3050958 5.963234 0.2184996 0.02882200 0.1762169
## 101   101 7.402617 0.3049733 5.963226 0.2178876 0.02912379 0.1764657
## 102   102 7.404802 0.3045910 5.964204 0.2188923 0.02920958 0.1773987
## 103   103 7.406492 0.3042917 5.964155 0.2170121 0.02879212 0.1764967
## 104   104 7.407183 0.3041575 5.964580 0.2175402 0.02850542 0.1783902
## 105   105 7.406832 0.3042475 5.962823 0.2179076 0.02850458 0.1775886
## 106   106 7.407766 0.3040779 5.963018 0.2151847 0.02829810 0.1763748
## 107   107 7.406192 0.3043905 5.961505 0.2156541 0.02830980 0.1764204
## 108   108 7.407643 0.3041084 5.963112 0.2154172 0.02811331 0.1757464
## 109   109 7.408954 0.3038946 5.963932 0.2152848 0.02798184 0.1748363
## 110   110 7.408673 0.3039667 5.963660 0.2155417 0.02790181 0.1752319
## 111   111 7.408130 0.3040476 5.964339 0.2138980 0.02768721 0.1742737
## 112   112 7.407606 0.3041495 5.964488 0.2136673 0.02775347 0.1740353
## 113   113 7.407761 0.3041406 5.964446 0.2121305 0.02766691 0.1738914
## 114   114 7.408649 0.3039985 5.964702 0.2117078 0.02795437 0.1748353
## 115   115 7.409494 0.3038415 5.966088 0.2114748 0.02812088 0.1735165
## 116   116 7.407931 0.3041266 5.964464 0.2106773 0.02804111 0.1738992
## 117   117 7.408593 0.3039908 5.964968 0.2097060 0.02787951 0.1735239
## 118   118 7.407890 0.3041397 5.964567 0.2109205 0.02816028 0.1748620
## 119   119 7.407469 0.3042120 5.964031 0.2104583 0.02809430 0.1747219
## 120   120 7.407781 0.3041456 5.964219 0.2110002 0.02763890 0.1751302
## 121   121 7.407820 0.3041363 5.963397 0.2114140 0.02753136 0.1762049
## 122   122 7.408031 0.3041195 5.962912 0.2103651 0.02743616 0.1753552
## 123   123 7.408832 0.3039876 5.963007 0.2112953 0.02750479 0.1755535
## 124   124 7.408995 0.3039551 5.962893 0.2115075 0.02734505 0.1747300
## 125   125 7.410287 0.3037384 5.964203 0.2126376 0.02718242 0.1764707
## 126   126 7.412392 0.3034176 5.965816 0.2132934 0.02745270 0.1770240
## 127   127 7.414200 0.3031143 5.967227 0.2129947 0.02721680 0.1775143
## 128   128 7.413749 0.3031555 5.966981 0.2124110 0.02716576 0.1773155
## 129   129 7.413335 0.3032163 5.966210 0.2105696 0.02660718 0.1761889
## 130   130 7.413238 0.3032315 5.966123 0.2088968 0.02638096 0.1749019
## 131   131 7.413887 0.3031298 5.967092 0.2095732 0.02626948 0.1767898
## 132   132 7.414526 0.3030335 5.967501 0.2101359 0.02657464 0.1759187
## 133   133 7.414520 0.3030404 5.967160 0.2110118 0.02666259 0.1764880
## 134   134 7.415460 0.3028768 5.967888 0.2119981 0.02664633 0.1782771
## 135   135 7.414623 0.3030090 5.968297 0.2118134 0.02677130 0.1774007
## 136   136 7.414485 0.3030387 5.969298 0.2107660 0.02714329 0.1772760
## 137   137 7.414213 0.3031070 5.969084 0.2108933 0.02716592 0.1767901
## 138   138 7.414893 0.3030051 5.969033 0.2111026 0.02709466 0.1758894
## 139   139 7.414737 0.3030466 5.969519 0.2111290 0.02731600 0.1747208
## 140   140 7.415132 0.3029769 5.969506 0.2103484 0.02722187 0.1747292
## 141   141 7.413559 0.3032615 5.968951 0.2124278 0.02738007 0.1766996
## 142   142 7.413450 0.3032955 5.968661 0.2123908 0.02749983 0.1751835
## 143   143 7.413738 0.3032492 5.968989 0.2130867 0.02754068 0.1766707
## 144   144 7.415774 0.3029210 5.969321 0.2136695 0.02781210 0.1764710
## 145   145 7.415912 0.3028995 5.969392 0.2127431 0.02775587 0.1757566
## 146   146 7.415685 0.3029702 5.969058 0.2136792 0.02797315 0.1761291
## 147   147 7.416598 0.3028240 5.969406 0.2137532 0.02798756 0.1748992
## 148   148 7.416695 0.3028130 5.969495 0.2126941 0.02774530 0.1748185
## 149   149 7.415834 0.3029473 5.969142 0.2139106 0.02779524 0.1756393
## 150   150 7.416066 0.3029119 5.970030 0.2144666 0.02776182 0.1752156
## 151   151 7.415372 0.3030243 5.968924 0.2148953 0.02790312 0.1764238
## 152   152 7.413162 0.3033710 5.966900 0.2146795 0.02793823 0.1758274
## 153   153 7.413296 0.3033508 5.966792 0.2153146 0.02783027 0.1768269
## 154   154 7.413567 0.3033306 5.966214 0.2147205 0.02761192 0.1763787
## 155   155 7.414294 0.3031904 5.966780 0.2140317 0.02769309 0.1756586
## 156   156 7.415321 0.3030027 5.967272 0.2162081 0.02796158 0.1766884
## 157   157 7.414547 0.3031288 5.966734 0.2165369 0.02783867 0.1767180
## 158   158 7.413756 0.3032641 5.965843 0.2154851 0.02760970 0.1769860
## 159   159 7.413943 0.3032224 5.965436 0.2156547 0.02749353 0.1776613
## 160   160 7.413693 0.3032420 5.964915 0.2162995 0.02743934 0.1784705
## 161   161 7.414013 0.3031980 5.965638 0.2164560 0.02751273 0.1779872
## 162   162 7.413849 0.3032345 5.965613 0.2162642 0.02753620 0.1774941
## 163   163 7.413590 0.3032802 5.965375 0.2163090 0.02759187 0.1780417
## 164   164 7.414018 0.3031993 5.965842 0.2166605 0.02771743 0.1776114
## 165   165 7.413989 0.3032309 5.965811 0.2173701 0.02773400 0.1783460
## 166   166 7.414390 0.3031620 5.965996 0.2178189 0.02777049 0.1797058
## 167   167 7.414730 0.3030920 5.966735 0.2178300 0.02762772 0.1791741
## 168   168 7.415007 0.3030545 5.966736 0.2188261 0.02769299 0.1798868
## 169   169 7.415986 0.3028714 5.968028 0.2178380 0.02759324 0.1795757
## 170   170 7.416481 0.3027932 5.968452 0.2173340 0.02763162 0.1797566
## 171   171 7.416696 0.3027466 5.968396 0.2166986 0.02762269 0.1791674
## 172   172 7.415573 0.3029373 5.967708 0.2169598 0.02761507 0.1802569
## 173   173 7.416084 0.3028671 5.968090 0.2164121 0.02776676 0.1790246
## 174   174 7.415344 0.3030063 5.968142 0.2172208 0.02788521 0.1792347
## 175   175 7.415686 0.3029505 5.968173 0.2171209 0.02783923 0.1790455
## 176   176 7.415381 0.3030023 5.968500 0.2172677 0.02781498 0.1791627
## 177   177 7.415852 0.3029253 5.968308 0.2169166 0.02783590 0.1790689
## 178   178 7.415332 0.3030170 5.967812 0.2164063 0.02777476 0.1784562
## 179   179 7.415635 0.3029659 5.968176 0.2163463 0.02777616 0.1786813
## 180   180 7.416079 0.3028795 5.968489 0.2167845 0.02774615 0.1792370
## 181   181 7.416463 0.3028145 5.968426 0.2170034 0.02766527 0.1792698
## 182   182 7.416189 0.3028667 5.967741 0.2179528 0.02776289 0.1808077
## 183   183 7.416013 0.3028951 5.968171 0.2176569 0.02771246 0.1806078
## 184   184 7.415018 0.3030655 5.967719 0.2176558 0.02775497 0.1808450
## 185   185 7.415114 0.3030434 5.967798 0.2164678 0.02762200 0.1801910
## 186   186 7.415199 0.3030277 5.967707 0.2169532 0.02754789 0.1807584
## 187   187 7.415708 0.3029461 5.968406 0.2167025 0.02759108 0.1810856
## 188   188 7.416181 0.3028541 5.968595 0.2164691 0.02749355 0.1810737
## 189   189 7.416757 0.3027451 5.969024 0.2164784 0.02753738 0.1814986
## 190   190 7.416825 0.3027304 5.968955 0.2167165 0.02749327 0.1816525
## 191   191 7.417240 0.3026637 5.969062 0.2167784 0.02747771 0.1816469
## 192   192 7.416982 0.3026998 5.968620 0.2167598 0.02746233 0.1815271
## 193   193 7.416994 0.3026971 5.968688 0.2168178 0.02743488 0.1822973
## 194   194 7.416577 0.3027773 5.968128 0.2174357 0.02758863 0.1824336
## 195   195 7.417164 0.3026794 5.968581 0.2168966 0.02743971 0.1819525
## 196   196 7.416390 0.3028115 5.968368 0.2172068 0.02753512 0.1822314
## 197   197 7.416698 0.3027541 5.968431 0.2174134 0.02757691 0.1825046
## 198   198 7.416894 0.3027146 5.968347 0.2172096 0.02752499 0.1822889
## 199   199 7.416646 0.3027589 5.967923 0.2184963 0.02752127 0.1836196
## 200   200 7.416667 0.3027597 5.967911 0.2184510 0.02743006 0.1837442
## 201   201 7.416748 0.3027490 5.967821 0.2183398 0.02751077 0.1836518
## 202   202 7.416376 0.3028147 5.967464 0.2184069 0.02748437 0.1833339
## 203   203 7.416268 0.3028263 5.967124 0.2180390 0.02740998 0.1832961
## 204   204 7.416053 0.3028576 5.967011 0.2182767 0.02737663 0.1833745
## 205   205 7.416201 0.3028252 5.967469 0.2184297 0.02739295 0.1835887
## 206   206 7.416567 0.3027618 5.968071 0.2189288 0.02747299 0.1838592
## 207   207 7.416392 0.3027927 5.967901 0.2189979 0.02741995 0.1837454
## 208   208 7.416740 0.3027251 5.968339 0.2188684 0.02735103 0.1836036
## 209   209 7.416656 0.3027461 5.968280 0.2190692 0.02735669 0.1837239
## 210   210 7.416854 0.3027143 5.968280 0.2189287 0.02732188 0.1833164
## 211   211 7.416546 0.3027638 5.968178 0.2187051 0.02728187 0.1831357
## 212   212 7.416659 0.3027433 5.968224 0.2187173 0.02730676 0.1829938
## 213   213 7.416523 0.3027703 5.968247 0.2187820 0.02737616 0.1829880
## 214   214 7.416767 0.3027298 5.968421 0.2189784 0.02738286 0.1832739
## 215   215 7.416593 0.3027683 5.968464 0.2191282 0.02745665 0.1832824
## 216   216 7.416868 0.3027201 5.968650 0.2191932 0.02742629 0.1834929
## 217   217 7.416506 0.3027815 5.968160 0.2187987 0.02741431 0.1830188
## 218   218 7.416357 0.3028043 5.967811 0.2188477 0.02738612 0.1830234
## 219   219 7.416180 0.3028286 5.967639 0.2185550 0.02731928 0.1829470
## 220   220 7.416280 0.3028124 5.967795 0.2182898 0.02732188 0.1824726
## 221   221 7.416118 0.3028430 5.967659 0.2181610 0.02732343 0.1822512
## 222   222 7.416195 0.3028293 5.967717 0.2180447 0.02731438 0.1820859
## 223   223 7.416345 0.3028024 5.967802 0.2181462 0.02737091 0.1821911
## 224   224 7.416384 0.3027934 5.967811 0.2181387 0.02735555 0.1821795
## 225   225 7.416390 0.3027907 5.967778 0.2180434 0.02731848 0.1820977
## 226   226 7.416468 0.3027766 5.967844 0.2179505 0.02729115 0.1819547
## 227   227 7.416482 0.3027754 5.967785 0.2179532 0.02727520 0.1820210
## 228   228 7.416638 0.3027505 5.967947 0.2180478 0.02726849 0.1820805
## 229   229 7.416303 0.3028058 5.967631 0.2181335 0.02728514 0.1821199
## 230   230 7.416250 0.3028145 5.967632 0.2180098 0.02726442 0.1819889
## 231   231 7.416267 0.3028119 5.967682 0.2178394 0.02723258 0.1817953
## 232   232 7.416231 0.3028211 5.967587 0.2177783 0.02723500 0.1816916
## 233   233 7.416227 0.3028214 5.967588 0.2176996 0.02723687 0.1816360
## 234   234 7.416163 0.3028312 5.967460 0.2176465 0.02723778 0.1816568
## 235   235 7.416083 0.3028448 5.967348 0.2176762 0.02724708 0.1817396
## 236   236 7.416058 0.3028499 5.967376 0.2176759 0.02726072 0.1816618
## 237   237 7.416085 0.3028466 5.967409 0.2176160 0.02726014 0.1816138
## 238   238 7.416116 0.3028413 5.967417 0.2176928 0.02727097 0.1816849
## 239   239 7.416087 0.3028466 5.967375 0.2176606 0.02726567 0.1816421
## 240   240 7.416075 0.3028485 5.967363 0.2176692 0.02726589 0.1816577
##    nvmax
## 12    12
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.645541e+01 -1.710005e-02  3.277989e+00  1.514512e-01  9.774137e-01 
##           x10           x11           x16           x17        stat14 
##  4.530726e-01  7.856504e+07  2.800699e-01  4.037473e-01 -2.873338e-01 
##        stat98       stat110      sqrt.x18 
##  9.434815e-01 -8.644369e-01  7.264493e+00

Test

# Hold-out evaluation of the caret-trained backward-selection model.
# The regsubsets-style object needs its method/formula/feature metadata
# passed through; draw.limits = TRUE overlays prediction limits.
if (algo.backward.caret == TRUE) {
  test.model(
    model.backward, data.test,
    method = "leapBackward", subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   107.0   120.4   123.8   123.9   127.4   139.2 
## [1] "leapBackward  Test MSE: 95.3468824372506"

Stepwise Selection (w/ full train)

Train

# Stepwise (bidirectional) model selection on the full training set:
# start from the null model and search up to the scope of the full model.
# trace = 0 suppresses the per-step console output.
if (algo.stepwise == TRUE) {
  t1 <- Sys.time()

  model.stepwise <- step(model.null, scope = list(upper = model.full),
                         data = data.train, direction = "both", trace = 0)
  print(summary(model.stepwise))
  # saveRDS(model.stepwise, file = "model_stepwise.rds")

  t2 <- Sys.time()
  # Fix: `t2 - t1` auto-selects its units (secs vs mins), so the bare number
  # printed before was ambiguous. Pin the units explicitly and format() the
  # difftime so the unit is part of the message.
  print(paste0("Time taken for Stepwise Selection: ",
               format(difftime(t2, t1, units = "secs"))))

  plot.diagnostics(model.stepwise, data.train)
}

Test

# Hold-out evaluation of the stepwise-selected model (full training set).
# Third argument is presumably a display label for the report — TODO confirm
# against the test.model definition.
if (algo.stepwise == TRUE){
  test.model(model.stepwise, data.test, "Stepwise Selection")
}

Stepwise Selection (w/ filtered train)

Train

# Stepwise (bidirectional) selection on the filtered training set
# (data.train2 — the "filtered train" variant per the section header).
if (algo.stepwise == TRUE) {
  t1 <- Sys.time()

  model.stepwise2 <- step(model.null2, scope = list(upper = model.full2),
                          data = data.train2, direction = "both", trace = 0)
  print(summary(model.stepwise2))
  # saveRDS(model.stepwise2, file = "model_stepwise2.rds")
  # (comment fixed: previously referenced model.forward, a copy-paste slip)

  t2 <- Sys.time()
  # Fix: pin the elapsed-time units; `t2 - t1` alone auto-selects them,
  # making the printed number ambiguous.
  print(paste0("Time taken for Stepwise Selection: ",
               format(difftime(t2, t1, units = "secs"))))

  plot.diagnostics(model.stepwise2, data.train2)
}

Test

# Hold-out evaluation of the stepwise model fit on the filtered training set.
# Note: evaluation still uses the unfiltered data.test.
if (algo.stepwise == TRUE){
  test.model(model.stepwise2, data.test, "Stepwise Selection (2)")
}

Stepwise Selection with CV (w/ full train)

Train

# Cross-validated stepwise selection via caret (method "leapSeq") on the
# full training set. Keep both the fitted model and the returned id; the
# id is needed later when scoring the regsubsets-style model.
if (algo.stepwise.caret == TRUE) {
  set.seed(1)
  caret.fit <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapSeq",
    feature.names = feature.names
  )
  model.stepwise <- caret.fit$model
  id <- caret.fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 9 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.190312 0.1031351 7.782556 0.2986960 0.01510985 0.1630465
## 2       2  9.959863 0.1431490 7.586895 0.3329232 0.01527567 0.1507478
## 3       3  9.800520 0.1706239 7.425932 0.3371193 0.02089715 0.1510136
## 4       4  9.664292 0.1935352 7.225063 0.3244736 0.02143874 0.1193851
## 5       5  9.581672 0.2073775 7.156896 0.3424689 0.02081535 0.1340120
## 6       6  9.555257 0.2117661 7.137916 0.3542029 0.02335306 0.1291911
## 7       7  9.559823 0.2111601 7.139880 0.3644482 0.02461551 0.1382767
## 8       8  9.547579 0.2133689 7.128788 0.3651827 0.02538635 0.1329768
## 9       9  9.521929 0.2176251 7.121917 0.3669530 0.02593030 0.1336222
## 10     10  9.527194 0.2165947 7.122196 0.3669707 0.02380088 0.1352945
## 11     11  9.533718 0.2154997 7.126706 0.3659198 0.02332300 0.1364556
## 12     12  9.541785 0.2142511 7.134061 0.3701140 0.02326252 0.1373844
## 13     13  9.534556 0.2154392 7.126870 0.3663178 0.02296114 0.1288239
## 14     14  9.536855 0.2150780 7.132556 0.3624797 0.02219343 0.1294582
## 15     15  9.544673 0.2138847 7.140628 0.3656304 0.02279245 0.1329859
## 16     16  9.547165 0.2134787 7.142068 0.3643487 0.02342147 0.1308032
## 17     17  9.654474 0.1950704 7.220345 0.4172855 0.04720590 0.2379057
## 18     18  9.541776 0.2143662 7.132028 0.3554484 0.02312240 0.1287582
## 19     19  9.551398 0.2129261 7.135442 0.3634061 0.02315957 0.1356564
## 20     20  9.553320 0.2126449 7.137766 0.3731946 0.02387889 0.1382055
## 21     21  9.734666 0.1818279 7.321607 0.5333316 0.07040266 0.3609003
## 22     22  9.564709 0.2108593 7.141372 0.3617888 0.02269872 0.1363974
## 23     23  9.633895 0.1991282 7.194892 0.4037053 0.04457317 0.2126384
## 24     24  9.569426 0.2101645 7.148424 0.3568115 0.02251044 0.1288990
## 25     25  9.657035 0.1956979 7.238927 0.5180501 0.05999439 0.3172019
## 26     26  9.579833 0.2087190 7.154649 0.3602064 0.02294081 0.1293800
## 27     27  9.670611 0.1937112 7.246580 0.5105721 0.05917500 0.3102672
## 28     28  9.585648 0.2078892 7.161082 0.3499625 0.02134787 0.1183348
## 29     29  9.584958 0.2081113 7.158470 0.3584721 0.02175998 0.1327570
## 30     30  9.667198 0.1945886 7.233744 0.5628045 0.05720298 0.3332814
## 31     31  9.649687 0.1961804 7.218017 0.3713830 0.04672142 0.2194825
## 32     32  9.670232 0.1935050 7.233531 0.3814738 0.04455640 0.2341396
## 33     33  9.591912 0.2071763 7.161583 0.3523344 0.02191520 0.1262565
## 34     34  9.660070 0.1945603 7.224939 0.3726388 0.04696951 0.2207257
## 35     35  9.755583 0.1797182 7.306319 0.6450356 0.06934900 0.4248753
## 36     36  9.680827 0.1919309 7.236238 0.3867735 0.04601127 0.2370203
## 37     37  9.777602 0.1764623 7.334808 0.6644921 0.06922252 0.4500402
## 38     38  9.607585 0.2050016 7.167672 0.3657610 0.02364328 0.1353432
## 39     39  9.747404 0.1812216 7.287511 0.5716517 0.06545754 0.3432910
## 40     40  9.697809 0.1900675 7.264534 0.5123872 0.05866549 0.3177405
## 41     41  9.713066 0.1869864 7.241897 0.4040167 0.04370842 0.2303426
## 42     42  9.798438 0.1727003 7.361193 0.5260436 0.05870726 0.3813545
## 43     43  9.719790 0.1864345 7.278481 0.5223136 0.04949772 0.3547977
## 44     44  9.686353 0.1907700 7.245476 0.3735687 0.04699500 0.2157123
## 45     45  9.699799 0.1893415 7.262484 0.3952091 0.04615861 0.2358106
## 46     46  9.727336 0.1853896 7.284780 0.5212262 0.04913982 0.3485038
## 47     47  9.803307 0.1716235 7.334471 0.4063110 0.05417708 0.2835556
## 48     48  9.628741 0.2021067 7.185416 0.3682602 0.02502653 0.1334039
## 49     49  9.827353 0.1683515 7.354167 0.5281837 0.05561040 0.3821111
## 50     50  9.716926 0.1877082 7.282831 0.5187257 0.05895392 0.3195152
## 51     51  9.633232 0.2015018 7.189299 0.3732839 0.02494934 0.1442339
## 52     52  9.816334 0.1704065 7.368766 0.5227145 0.05794595 0.3777259
## 53     53  9.705215 0.1893030 7.255984 0.5482240 0.05475961 0.3155241
## 54     54  9.757951 0.1789186 7.277532 0.3757459 0.05696282 0.2193548
## 55     55  9.723729 0.1863095 7.266371 0.5164473 0.05340109 0.3328691
## 56     56  9.783102 0.1755381 7.316328 0.4134195 0.05551849 0.2584775
## 57     57  9.643377 0.2001960 7.194485 0.3748403 0.02497436 0.1479849
## 58     58  9.950411 0.1461824 7.491150 0.5987729 0.07212559 0.4334942
## 59     59  9.646000 0.1999560 7.194594 0.3770139 0.02576588 0.1442080
## 60     60  9.644844 0.2000926 7.194016 0.3760512 0.02535946 0.1462961
## 61     61  9.783611 0.1759157 7.307232 0.5339181 0.06243365 0.3453435
## 62     62  9.727608 0.1864307 7.284009 0.5134452 0.05779036 0.3126697
## 63     63  9.792911 0.1743667 7.338544 0.5469939 0.06431641 0.3483481
## 64     64  9.897344 0.1555176 7.393931 0.3593943 0.06201989 0.2847526
## 65     65  9.654908 0.1987652 7.201727 0.3830757 0.02665891 0.1475707
## 66     66  9.656953 0.1985485 7.202676 0.3830062 0.02671082 0.1478926
## 67     67  9.725455 0.1855819 7.266714 0.3854845 0.04758601 0.2159378
## 68     68  9.799825 0.1735470 7.332229 0.4276426 0.05707040 0.2638275
## 69     69  9.760715 0.1807482 7.277111 0.4108614 0.04337169 0.2370464
## 70     70  9.786698 0.1749259 7.300001 0.3856357 0.06003977 0.2254303
## 71     71  9.658648 0.1983548 7.208261 0.3830775 0.02774644 0.1478305
## 72     72  9.719996 0.1876577 7.252017 0.4122612 0.04509212 0.1985166
## 73     73  9.804599 0.1718987 7.349854 0.3966717 0.05873620 0.2743080
## 74     74  9.657656 0.1986256 7.205896 0.3773908 0.02714189 0.1451802
## 75     75  9.835629 0.1683770 7.379196 0.6095733 0.06286493 0.4416994
## 76     76  9.715628 0.1883078 7.254073 0.4092194 0.04596392 0.1981698
## 77     77  9.897862 0.1563450 7.422562 0.5761349 0.07030229 0.4374647
## 78     78  9.798758 0.1747374 7.344334 0.5346844 0.06738385 0.3317142
## 79     79  9.653743 0.1992964 7.204851 0.3778929 0.02724880 0.1491632
## 80     80  9.830589 0.1691231 7.375413 0.6057932 0.06199784 0.4367011
## 81     81  9.755446 0.1818322 7.281923 0.4083528 0.04445282 0.2386538
## 82     82  9.816513 0.1709216 7.360509 0.4171087 0.05982547 0.2817777
## 83     83  9.794307 0.1748119 7.340409 0.6316921 0.06943683 0.4141603
## 84     84  9.862279 0.1628284 7.386668 0.5554768 0.07100304 0.3536816
## 85     85  9.729713 0.1859590 7.279146 0.5058373 0.05330602 0.3211548
## 86     86  9.889255 0.1585305 7.396638 0.5568935 0.06796105 0.3620510
## 87     87  9.762066 0.1814206 7.312187 0.5071793 0.04594625 0.3387590
## 88     88  9.658944 0.1986346 7.214359 0.3701622 0.02709498 0.1403273
## 89     89  9.660904 0.1983185 7.217223 0.3727949 0.02687345 0.1444490
## 90     90  9.913915 0.1549664 7.468054 0.5885821 0.07207830 0.4244982
## 91     91  9.750589 0.1830129 7.307783 0.4096286 0.05010589 0.2501629
## 92     92  9.901334 0.1568859 7.437095 0.6056443 0.06713399 0.4310616
## 93     93  9.799782 0.1731616 7.334818 0.4081883 0.06002026 0.2601197
## 94     94  9.882269 0.1592193 7.416734 0.5426872 0.07260698 0.3711464
## 95     95  9.672308 0.1968489 7.229079 0.3774823 0.02702521 0.1488123
## 96     96  9.746650 0.1837091 7.298002 0.5158382 0.05427028 0.3330365
## 97     97  9.754714 0.1825742 7.312505 0.4081115 0.04975648 0.2468809
## 98     98  9.802473 0.1741697 7.343793 0.5728633 0.06632663 0.3498127
## 99     99  9.747214 0.1836080 7.292476 0.3934976 0.04626876 0.2208171
## 100   100  9.903614 0.1555331 7.447395 0.5020575 0.07090314 0.3761001
## 101   101  9.828272 0.1693663 7.346440 0.4244671 0.05498304 0.2676316
## 102   102  9.745761 0.1837817 7.292058 0.3886641 0.04580255 0.2199246
## 103   103  9.833782 0.1686943 7.381184 0.5256984 0.06518822 0.3649990
## 104   104  9.869834 0.1632524 7.396714 0.5086733 0.05208318 0.3696423
## 105   105  9.829408 0.1699955 7.378597 0.5671174 0.06804837 0.3675771
## 106   106  9.675372 0.1964609 7.230769 0.3708041 0.02597633 0.1451272
## 107   107  9.907003 0.1555240 7.409768 0.5285254 0.07032486 0.3623992
## 108   108  9.835751 0.1696178 7.387860 0.5260623 0.06725967 0.3477218
## 109   109  9.755816 0.1825570 7.303086 0.5189964 0.05430519 0.3273739
## 110   110  9.815215 0.1714696 7.360849 0.5491860 0.06765219 0.3659100
## 111   111  9.816717 0.1706181 7.343853 0.3593832 0.05938465 0.2295525
## 112   112  9.775055 0.1798126 7.329922 0.5091027 0.04599479 0.3411382
## 113   113  9.676257 0.1964152 7.230682 0.3728357 0.02620619 0.1443864
## 114   114  9.889561 0.1584072 7.412960 0.5227405 0.07312954 0.3525130
## 115   115  9.750502 0.1826658 7.296476 0.3801053 0.04946209 0.2313095
## 116   116  9.761853 0.1816398 7.313575 0.4090703 0.05011475 0.2461135
## 117   117  9.743434 0.1838320 7.278972 0.3509111 0.04938221 0.1775772
## 118   118  9.769546 0.1820272 7.329479 0.5313481 0.05940778 0.3350915
## 119   119  9.680002 0.1959840 7.232627 0.3727220 0.02529123 0.1433026
## 120   120  9.718325 0.1882676 7.258516 0.3468530 0.03785384 0.1473159
## 121   121  9.925440 0.1523719 7.480638 0.4558523 0.06356357 0.3334553
## 122   122  9.735489 0.1863742 7.276174 0.4023792 0.04258972 0.1979448
## 123   123  9.884223 0.1615507 7.443126 0.5640306 0.06772518 0.3837877
## 124   124  9.736317 0.1862642 7.279665 0.4040432 0.04293485 0.2003707
## 125   125  9.680858 0.1958885 7.233691 0.3759796 0.02618569 0.1474511
## 126   126  9.764717 0.1799060 7.312905 0.4712684 0.05223982 0.2830683
## 127   127  9.833095 0.1696104 7.381129 0.4675301 0.05018611 0.3276642
## 128   128  9.891523 0.1580545 7.432078 0.4764370 0.06352596 0.3165078
## 129   129  9.734237 0.1866134 7.277704 0.4060792 0.04250229 0.2032923
## 130   130  9.808170 0.1735256 7.349560 0.4810475 0.05422819 0.3184184
## 131   131  9.749046 0.1836677 7.296752 0.3793331 0.04562425 0.2270764
## 132   132  9.728455 0.1879315 7.271318 0.3993995 0.03888730 0.1882972
## 133   133  9.729240 0.1871377 7.279975 0.4505691 0.04021413 0.2580556
## 134   134  9.680199 0.1960284 7.232287 0.3778719 0.02591808 0.1486330
## 135   135  9.741577 0.1854860 7.276242 0.3733469 0.02913383 0.1873979
## 136   136  9.681354 0.1959065 7.233902 0.3792700 0.02599518 0.1487913
## 137   137  9.827225 0.1698229 7.336466 0.3230805 0.03894169 0.1997725
## 138   138  9.716672 0.1890738 7.248485 0.3511264 0.03534189 0.1425053
## 139   139  9.866232 0.1653153 7.432034 0.5000916 0.05078080 0.3082747
## 140   140  9.681357 0.1959377 7.234114 0.3745383 0.02564463 0.1479013
## 141   141  9.770539 0.1804953 7.309576 0.4843665 0.05019840 0.2618941
## 142   142  9.732007 0.1875017 7.274303 0.3964004 0.03926987 0.1880576
## 143   143  9.747734 0.1850875 7.299084 0.4425696 0.03051123 0.2432576
## 144   144  9.841551 0.1682881 7.393443 0.4518292 0.05251886 0.2937025
## 145   145  9.745457 0.1849697 7.280403 0.3747167 0.03078549 0.1910265
## 146   146  9.746511 0.1852239 7.300083 0.4431563 0.03056860 0.2457204
## 147   147  9.740215 0.1864599 7.300890 0.4668642 0.04710173 0.2509893
## 148   148  9.743723 0.1856718 7.298298 0.4439658 0.03077669 0.2464817
## 149   149  9.720702 0.1888668 7.271742 0.4747865 0.04288321 0.2394308
## 150   150  9.680350 0.1961718 7.237013 0.3759949 0.02566275 0.1486301
## 151   151  9.679111 0.1963327 7.236872 0.3771569 0.02579658 0.1498578
## 152   152  9.795813 0.1766614 7.321412 0.3900875 0.04033969 0.2177235
## 153   153  9.732474 0.1874996 7.277401 0.4006803 0.03978414 0.1918501
## 154   154  9.747315 0.1847956 7.283769 0.3760441 0.03044892 0.1936903
## 155   155  9.732439 0.1870849 7.286681 0.3790090 0.03580759 0.1973204
## 156   156  9.727429 0.1874138 7.273262 0.3690166 0.03729089 0.1776239
## 157   157  9.681227 0.1960594 7.237385 0.3782965 0.02584149 0.1527437
## 158   158  9.681445 0.1960346 7.237682 0.3782312 0.02587495 0.1540619
## 159   159  9.849918 0.1679146 7.403318 0.5885229 0.05656753 0.3518097
## 160   160  9.682846 0.1958435 7.238035 0.3813388 0.02606633 0.1571995
## 161   161  9.683462 0.1957520 7.238646 0.3817146 0.02616007 0.1577236
## 162   162  9.811434 0.1749692 7.368982 0.5257148 0.04878707 0.3155117
## 163   163  9.730312 0.1870555 7.276039 0.3731512 0.03716683 0.1814440
## 164   164  9.801036 0.1758603 7.352531 0.4448019 0.03758246 0.2712570
## 165   165  9.724374 0.1883983 7.275098 0.4815374 0.04335356 0.2479793
## 166   166  9.685611 0.1954958 7.240482 0.3868206 0.02652134 0.1622021
## 167   167  9.749145 0.1853764 7.308368 0.4824723 0.04849250 0.2654205
## 168   168  9.687422 0.1952193 7.242378 0.3854985 0.02648754 0.1617974
## 169   169  9.732255 0.1867995 7.278796 0.3757282 0.03741461 0.1859149
## 170   170  9.687054 0.1952694 7.242107 0.3854533 0.02649511 0.1628453
## 171   171  9.782866 0.1778288 7.323898 0.4525407 0.04782311 0.2777314
## 172   172  9.836234 0.1687083 7.388368 0.4566423 0.05307489 0.2789295
## 173   173  9.747918 0.1855425 7.308766 0.4773201 0.04764896 0.2634436
## 174   174  9.798464 0.1756355 7.354199 0.3692875 0.04444249 0.2224292
## 175   175  9.686989 0.1952873 7.243054 0.3859782 0.02638948 0.1610834
## 176   176  9.686239 0.1954047 7.242812 0.3854318 0.02633570 0.1619176
## 177   177  9.686319 0.1953972 7.242967 0.3863302 0.02644928 0.1624842
## 178   178  9.802012 0.1756167 7.363993 0.3804724 0.04345928 0.2307627
## 179   179  9.755033 0.1840513 7.306487 0.4584206 0.03270745 0.2561347
## 180   180  9.755383 0.1839991 7.306840 0.4586171 0.03270185 0.2563784
## 181   181  9.751799 0.1843480 7.316456 0.3908958 0.03855718 0.2151562
## 182   182  9.842840 0.1681370 7.384620 0.5227583 0.04974054 0.3208463
## 183   183  9.733299 0.1867254 7.283383 0.3788778 0.03795864 0.1917107
## 184   184  9.730076 0.1873668 7.262247 0.3587793 0.03795580 0.1573252
## 185   185  9.782033 0.1787807 7.330825 0.4848681 0.04937990 0.2824692
## 186   186  9.738007 0.1862692 7.288748 0.4683361 0.04216124 0.2685246
## 187   187  9.688087 0.1951514 7.244533 0.3883820 0.02648368 0.1634847
## 188   188  9.687498 0.1952262 7.243998 0.3880765 0.02662024 0.1627912
## 189   189  9.809518 0.1750920 7.354672 0.4754498 0.04301042 0.2827123
## 190   190  9.688076 0.1951638 7.244673 0.3886770 0.02667861 0.1631713
## 191   191  9.748264 0.1854893 7.309216 0.4808897 0.04807283 0.2644831
## 192   192  9.688433 0.1950903 7.244603 0.3891249 0.02652796 0.1629784
## 193   193  9.792354 0.1774126 7.328695 0.4517356 0.05365808 0.2564879
## 194   194  9.736348 0.1862698 7.284802 0.3792676 0.03832345 0.1911584
## 195   195  9.756531 0.1835674 7.296462 0.3890887 0.03182146 0.2113342
## 196   196  9.742277 0.1862350 7.290865 0.4136438 0.04097321 0.2093620
## 197   197  9.688727 0.1950292 7.245219 0.3877946 0.02633177 0.1608195
## 198   198  9.688526 0.1950561 7.245291 0.3886547 0.02647466 0.1616773
## 199   199  9.734570 0.1871778 7.287538 0.5002729 0.04507389 0.2683517
## 200   200  9.688306 0.1950794 7.245339 0.3881484 0.02640337 0.1617859
## 201   201  9.687820 0.1951452 7.245034 0.3882991 0.02640802 0.1614124
## 202   202  9.742145 0.1857082 7.293592 0.4776602 0.04352793 0.2779112
## 203   203  9.686687 0.1953210 7.244275 0.3882422 0.02633076 0.1612657
## 204   204  9.686414 0.1953600 7.244124 0.3883948 0.02626145 0.1618459
## 205   205  9.687327 0.1952193 7.245070 0.3883671 0.02627019 0.1618590
## 206   206  9.688045 0.1951112 7.245373 0.3888316 0.02624860 0.1621414
## 207   207  9.761871 0.1829628 7.312168 0.4705116 0.03489848 0.2659532
## 208   208  9.743853 0.1855568 7.295744 0.3908591 0.03731808 0.2075973
## 209   209  9.689119 0.1949577 7.246420 0.3884732 0.02616674 0.1614659
## 210   210  9.689269 0.1949389 7.246530 0.3886716 0.02617977 0.1621111
## 211   211  9.689969 0.1948508 7.247384 0.3890683 0.02630843 0.1623180
## 212   212  9.734785 0.1866828 7.266820 0.3586624 0.03822925 0.1554284
## 213   213  9.734895 0.1871596 7.290411 0.4999865 0.04492193 0.2732782
## 214   214  9.689415 0.1949425 7.246903 0.3896395 0.02636160 0.1627810
## 215   215  9.758518 0.1832770 7.298001 0.3907720 0.03199898 0.2127057
## 216   216  9.689278 0.1949713 7.246751 0.3899865 0.02644316 0.1631724
## 217   217  9.747691 0.1856664 7.295209 0.4223884 0.04245461 0.2190336
## 218   218  9.736564 0.1864552 7.267287 0.3590938 0.03874624 0.1564091
## 219   219  9.752429 0.1849887 7.313641 0.4867945 0.04852542 0.2689837
## 220   220  9.734564 0.1872666 7.291313 0.5000249 0.04473423 0.2772701
## 221   221  9.758629 0.1832906 7.298177 0.3917607 0.03200313 0.2140430
## 222   222  9.764330 0.1826364 7.314393 0.4738144 0.03537563 0.2694166
## 223   223  9.807797 0.1755851 7.365075 0.4832151 0.05341724 0.2902535
## 224   224  9.689345 0.1949736 7.246635 0.3909025 0.02648784 0.1639960
## 225   225  9.745197 0.1860474 7.293446 0.4203605 0.04163275 0.2166421
## 226   226  9.818496 0.1733137 7.363731 0.4655779 0.04093836 0.2856130
## 227   227  9.817508 0.1740107 7.345520 0.4143677 0.04353102 0.2471836
## 228   228  9.743332 0.1856811 7.295181 0.3922166 0.03708896 0.2064916
## 229   229  9.689107 0.1950106 7.246410 0.3908175 0.02649283 0.1637711
## 230   230  9.743762 0.1853611 7.293283 0.3838211 0.03987269 0.2043493
## 231   231  9.760175 0.1831041 7.299612 0.3940028 0.03266806 0.2176058
## 232   232  9.689154 0.1950023 7.246441 0.3908435 0.02647868 0.1638115
## 233   233  9.863905 0.1649704 7.368986 0.3329329 0.04417739 0.2237182
## 234   234  9.753396 0.1844556 7.298141 0.5013827 0.04654672 0.2887692
## 235   235  9.936329 0.1533771 7.462504 0.5169229 0.05298987 0.3496056
## 236   236  9.826559 0.1728204 7.368953 0.4851023 0.05035900 0.2980974
## 237   237  9.754535 0.1840940 7.322107 0.3945071 0.03834973 0.2207534
## 238   238  9.739059 0.1861224 7.265125 0.3601101 0.03976859 0.1562979
## 239   239  9.991551 0.1447417 7.520393 0.4440741 0.05881886 0.3055987
## 240   240  9.689033 0.1950205 7.246237 0.3907893 0.02647943 0.1636052
##   nvmax
## 9     9
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

## (Intercept)          x4          x7          x9         x10         x16 
##  96.8126929  -0.0150172   3.1893069   0.9833780   0.3546913   0.2736344 
##         x17      stat98     stat110    sqrt.x18 
##   0.4098622   1.0203902  -0.9352919   7.5838139

Test

# Hold-out evaluation of the CV-selected stepwise (leapSeq) model, passing
# the method/formula/feature metadata the regsubsets-style object requires.
if (algo.stepwise.caret == TRUE) {
  test.model(
    model.stepwise, data.test,
    method = "leapSeq", subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    id = id,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.0   121.5   125.1   125.2   128.9   139.6 
## [1] "leapSeq  Test MSE: 93.5015990422204"

Stepwise Selection with CV (w/ filtered train)

Train

Test

LASSO (w/ full train)

Train

# LASSO (glmnet, alpha = 1) on the full training set.
# cv.glmnet chooses the penalty by cross-validation; coefficients are
# reported at lambda.min.
if (algo.LASSO == TRUE) {
  # glmnet needs a numeric matrix, not a data frame. model.matrix would work
  # as well — it builds a design matrix, expanding factors to dummy variables
  # (per the contrasts) and expanding interactions similarly.
  x <- as.matrix(data.train[, feature.names])
  y <- data.train[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Lambda path: 100 values from 1e10 down to 1e-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  cv.out <- cv.glmnet(x, y, alpha = 1)  # alpha = 1 performs LASSO
  plot(cv.out)
  # Optimal penalty parameter; could also be read off the CV plot.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

# Hold-out evaluation of the full-train LASSO fit at the CV-chosen lambda.
if (algo.LASSO == TRUE) {
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Mean squared error on the test set (no square root is taken).
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # Fix: the old label said "RMSE" but the value is the MSE — consistent
  # with the variable name and with the "Test MSE" output of the other
  # algorithms in this report.
  print(paste0("LASSO Test MSE: ", testMSE_LASSO))

  # Predicted vs. actual scatter.
  plot(ytest, lasso.pred)
}

LASSO (w/ filtered train)

Train

# LASSO (glmnet, alpha = 1) on the filtered training set (data.train2);
# the test matrix still comes from the unfiltered data.test.
if (algo.LASSO == TRUE) {
  # glmnet requires a numeric matrix. model.matrix is an alternative — it
  # creates a design (model) matrix, e.g. expanding factors to dummy
  # variables (depending on the contrasts) and expanding interactions.
  x <- as.matrix(data.train2[, feature.names])
  y <- data.train2[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Lambda path: 100 values from 1e10 down to 1e-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  cv.out <- cv.glmnet(x, y, alpha = 1)  # alpha = 1 performs LASSO
  plot(cv.out)
  # Optimal penalty parameter; the CV curve plot lets you sanity-check it.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

# Evaluate the filtered-train LASSO fit on the held-out test set at the
# CV-selected penalty.
if (algo.LASSO) {
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Mean squared prediction error on the test set.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # BUG FIX: the printed label said "RMSE" but the value is an MSE
  # (no sqrt is taken); every other metric in this report says "Test MSE".
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))

  # Predicted-vs-actual scatter for a quick calibration check.
  plot(ytest, lasso.pred)
}

LASSO with CV (w/ full train)

Train

# Train LASSO via caret (method "glmnet", alpha fixed at 1) with
# cross-validation on the full training set.
if (algo.LASSO.caret) {
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "glmnet",
    subopt = "LASSO",
    feature.names = feature.names
  )
  model.LASSO.caret <- returned$model
}
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.179 on full training set
## glmnet 
## 
## 6002 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE      Rsquared   MAE     
##   0.01000000  9.666192  0.1975723  7.228049
##   0.01047616  9.665158  0.1976910  7.227242
##   0.01097499  9.664075  0.1978159  7.226391
##   0.01149757  9.662940  0.1979473  7.225489
##   0.01204504  9.661761  0.1980840  7.224545
##   0.01261857  9.660534  0.1982265  7.223560
##   0.01321941  9.659258  0.1983750  7.222534
##   0.01384886  9.657926  0.1985304  7.221474
##   0.01450829  9.656539  0.1986928  7.220360
##   0.01519911  9.655112  0.1988602  7.219202
##   0.01592283  9.653626  0.1990350  7.218007
##   0.01668101  9.652089  0.1992163  7.216781
##   0.01747528  9.650487  0.1994064  7.215507
##   0.01830738  9.648828  0.1996038  7.214190
##   0.01917910  9.647118  0.1998078  7.212827
##   0.02009233  9.645368  0.2000165  7.211434
##   0.02104904  9.643574  0.2002308  7.210015
##   0.02205131  9.641743  0.2004499  7.208564
##   0.02310130  9.639865  0.2006750  7.207086
##   0.02420128  9.637927  0.2009086  7.205554
##   0.02535364  9.635939  0.2011482  7.203978
##   0.02656088  9.633896  0.2013951  7.202345
##   0.02782559  9.631804  0.2016491  7.200703
##   0.02915053  9.629659  0.2019107  7.199048
##   0.03053856  9.627460  0.2021802  7.197358
##   0.03199267  9.625176  0.2024623  7.195609
##   0.03351603  9.622832  0.2027529  7.193834
##   0.03511192  9.620425  0.2030521  7.192055
##   0.03678380  9.617988  0.2033563  7.190277
##   0.03853529  9.615503  0.2036679  7.188477
##   0.04037017  9.612970  0.2039876  7.186646
##   0.04229243  9.610393  0.2043157  7.184790
##   0.04430621  9.607759  0.2046541  7.182946
##   0.04641589  9.605079  0.2050013  7.181077
##   0.04862602  9.602388  0.2053518  7.179211
##   0.05094138  9.599666  0.2057090  7.177331
##   0.05336699  9.596969  0.2060651  7.175486
##   0.05590810  9.594250  0.2064278  7.173682
##   0.05857021  9.591572  0.2067881  7.171937
##   0.06135907  9.588981  0.2071394  7.170321
##   0.06428073  9.586324  0.2075052  7.168723
##   0.06734151  9.583569  0.2078905  7.167060
##   0.07054802  9.580754  0.2082908  7.165331
##   0.07390722  9.577757  0.2087260  7.163496
##   0.07742637  9.574720  0.2091728  7.161675
##   0.08111308  9.571585  0.2096412  7.159857
##   0.08497534  9.568523  0.2101049  7.158031
##   0.08902151  9.565522  0.2105670  7.156124
##   0.09326033  9.562652  0.2110170  7.154329
##   0.09770100  9.559965  0.2114468  7.152699
##   0.10235310  9.557439  0.2118584  7.151266
##   0.10722672  9.555022  0.2122616  7.149994
##   0.11233240  9.552759  0.2126478  7.148940
##   0.11768120  9.550539  0.2130360  7.148005
##   0.12328467  9.548479  0.2134103  7.147346
##   0.12915497  9.546633  0.2137605  7.146992
##   0.13530478  9.544912  0.2141045  7.146753
##   0.14174742  9.543546  0.2144012  7.146802
##   0.14849683  9.542374  0.2146788  7.146949
##   0.15556761  9.541419  0.2149320  7.147412
##   0.16297508  9.540803  0.2151407  7.148288
##   0.17073526  9.540517  0.2153067  7.149610
##   0.17886495  9.540471  0.2154462  7.151216
##   0.18738174  9.540647  0.2155651  7.153027
##   0.19630407  9.540869  0.2156927  7.154761
##   0.20565123  9.541063  0.2158455  7.156428
##   0.21544347  9.541551  0.2159660  7.158141
##   0.22570197  9.542416  0.2160414  7.160203
##   0.23644894  9.543905  0.2160258  7.163069
##   0.24770764  9.546029  0.2159178  7.166655
##   0.25950242  9.548642  0.2157438  7.170682
##   0.27185882  9.551553  0.2155395  7.174818
##   0.28480359  9.555090  0.2152425  7.179380
##   0.29836472  9.559147  0.2148757  7.184056
##   0.31257158  9.563608  0.2144620  7.189023
##   0.32745492  9.568578  0.2139847  7.194257
##   0.34304693  9.573868  0.2134847  7.199868
##   0.35938137  9.579544  0.2129468  7.206048
##   0.37649358  9.585748  0.2123437  7.212695
##   0.39442061  9.592258  0.2117212  7.219697
##   0.41320124  9.599277  0.2110410  7.227128
##   0.43287613  9.606635  0.2103394  7.234877
##   0.45348785  9.614653  0.2095509  7.243221
##   0.47508102  9.623129  0.2087214  7.252132
##   0.49770236  9.631844  0.2079072  7.261469
##   0.52140083  9.640686  0.2071370  7.271222
##   0.54622772  9.649984  0.2063499  7.281531
##   0.57223677  9.659264  0.2056564  7.291960
##   0.59948425  9.669357  0.2048816  7.302947
##   0.62802914  9.680329  0.2040182  7.314588
##   0.65793322  9.691960  0.2031161  7.326856
##   0.68926121  9.704058  0.2022327  7.339464
##   0.72208090  9.716720  0.2013547  7.352565
##   0.75646333  9.729824  0.2005248  7.366245
##   0.79248290  9.744183  0.1995634  7.380834
##   0.83021757  9.759899  0.1984516  7.396596
##   0.86974900  9.777119  0.1971521  7.413448
##   0.91116276  9.795985  0.1956276  7.431538
##   0.95454846  9.816648  0.1938320  7.450995
##   1.00000000  9.839277  0.1917088  7.471944
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.178865.

##    alpha   lambda
## 63     1 0.178865

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Score the caret-trained LASSO model on the held-out test set.
if (algo.LASSO.caret) {
  test.model(
    model.LASSO.caret, data.test,
    method = "glmnet", subopt = "LASSO",
    formula = formula, feature.names = feature.names, label.names = label.names,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   110.5   121.9   125.2   125.2   128.7   138.6 
## [1] "glmnet LASSO Test MSE: 93.024369635398"

LASSO with CV (w/ filtered train)

Train

# Train LASSO via caret with cross-validation, this time on the
# filtered training set (data.train2).  Overwrites model.LASSO.caret.
if (algo.LASSO.caret) {
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "glmnet",
    subopt = "LASSO",
    feature.names = feature.names
  )
  model.LASSO.caret <- returned$model
}
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.112 on full training set
## glmnet 
## 
## 5699 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5130, 5129, 5129, 5128, 5129, 5131, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE      Rsquared   MAE     
##   0.01000000  7.394352  0.3059198  5.951279
##   0.01047616  7.393395  0.3060586  5.950605
##   0.01097499  7.392407  0.3062019  5.949911
##   0.01149757  7.391389  0.3063497  5.949197
##   0.01204504  7.390348  0.3065005  5.948457
##   0.01261857  7.389281  0.3066549  5.947694
##   0.01321941  7.388182  0.3068142  5.946909
##   0.01384886  7.387042  0.3069799  5.946099
##   0.01450829  7.385870  0.3071509  5.945256
##   0.01519911  7.384654  0.3073286  5.944375
##   0.01592283  7.383402  0.3075122  5.943485
##   0.01668101  7.382095  0.3077047  5.942576
##   0.01747528  7.380724  0.3079079  5.941639
##   0.01830738  7.379308  0.3081186  5.940660
##   0.01917910  7.377854  0.3083357  5.939646
##   0.02009233  7.376361  0.3085593  5.938620
##   0.02104904  7.374823  0.3087897  5.937566
##   0.02205131  7.373231  0.3090294  5.936494
##   0.02310130  7.371557  0.3092836  5.935380
##   0.02420128  7.369825  0.3095482  5.934223
##   0.02535364  7.368023  0.3098255  5.932995
##   0.02656088  7.366196  0.3101077  5.931774
##   0.02782559  7.364319  0.3103990  5.930524
##   0.02915053  7.362399  0.3106985  5.929234
##   0.03053856  7.360409  0.3110104  5.927901
##   0.03199267  7.358392  0.3113281  5.926561
##   0.03351603  7.356353  0.3116509  5.925222
##   0.03511192  7.354288  0.3119798  5.923888
##   0.03678380  7.352157  0.3123223  5.922477
##   0.03853529  7.350018  0.3126682  5.921055
##   0.04037017  7.347824  0.3130256  5.919577
##   0.04229243  7.345619  0.3133872  5.918069
##   0.04430621  7.343420  0.3137503  5.916573
##   0.04641589  7.341242  0.3141128  5.915080
##   0.04862602  7.339099  0.3144734  5.913590
##   0.05094138  7.336976  0.3148352  5.912170
##   0.05336699  7.334914  0.3151914  5.910801
##   0.05590810  7.332896  0.3155447  5.909500
##   0.05857021  7.330759  0.3159255  5.908096
##   0.06135907  7.328637  0.3163103  5.906728
##   0.06428073  7.326494  0.3167056  5.905318
##   0.06734151  7.324357  0.3171073  5.903903
##   0.07054802  7.322224  0.3175163  5.902542
##   0.07390722  7.320244  0.3179047  5.901231
##   0.07742637  7.318454  0.3182657  5.900129
##   0.08111308  7.316814  0.3186061  5.899166
##   0.08497534  7.315235  0.3189432  5.898199
##   0.08902151  7.313815  0.3192598  5.897375
##   0.09326033  7.312638  0.3195401  5.896756
##   0.09770100  7.311737  0.3197797  5.896396
##   0.10235310  7.311084  0.3199826  5.896148
##   0.10722672  7.310732  0.3201403  5.896124
##   0.11233240  7.310636  0.3202637  5.896261
##   0.11768120  7.310823  0.3203463  5.896623
##   0.12328467  7.311313  0.3203836  5.897271
##   0.12915497  7.312265  0.3203467  5.898231
##   0.13530478  7.313719  0.3202283  5.899593
##   0.14174742  7.315633  0.3200369  5.901269
##   0.14849683  7.318033  0.3197692  5.903376
##   0.15556761  7.321012  0.3194016  5.905806
##   0.16297508  7.324796  0.3188888  5.908659
##   0.17073526  7.329128  0.3182845  5.911814
##   0.17886495  7.333865  0.3176172  5.915093
##   0.18738174  7.339096  0.3168691  5.918695
##   0.19630407  7.344354  0.3161352  5.922274
##   0.20565123  7.350001  0.3153406  5.926057
##   0.21544347  7.355512  0.3145941  5.929879
##   0.22570197  7.361334  0.3138106  5.933927
##   0.23644894  7.366939  0.3131008  5.938007
##   0.24770764  7.372907  0.3123414  5.942646
##   0.25950242  7.378571  0.3116727  5.947308
##   0.27185882  7.384338  0.3110179  5.952074
##   0.28480359  7.390500  0.3103233  5.957475
##   0.29836472  7.397088  0.3095744  5.963230
##   0.31257158  7.404037  0.3087840  5.969202
##   0.32745492  7.411593  0.3079085  5.975640
##   0.34304693  7.419726  0.3069556  5.982402
##   0.35938137  7.428685  0.3058733  5.989745
##   0.37649358  7.438664  0.3046248  5.997874
##   0.39442061  7.449543  0.3032326  6.006613
##   0.41320124  7.461050  0.3017569  6.015988
##   0.43287613  7.473288  0.3001806  6.026036
##   0.45348785  7.485938  0.2985704  6.036371
##   0.47508102  7.499409  0.2968454  6.047363
##   0.49770236  7.512757  0.2952256  6.058253
##   0.52140083  7.526702  0.2935613  6.069487
##   0.54622772  7.540119  0.2921270  6.080598
##   0.57223677  7.554301  0.2906077  6.092386
##   0.59948425  7.568860  0.2891178  6.104557
##   0.62802914  7.584458  0.2874980  6.117668
##   0.65793322  7.600688  0.2858619  6.131499
##   0.68926121  7.618246  0.2840318  6.146177
##   0.72208090  7.636694  0.2821320  6.161483
##   0.75646333  7.655954  0.2801977  6.177375
##   0.79248290  7.675003  0.2785357  6.193192
##   0.83021757  7.695369  0.2767489  6.209980
##   0.86974900  7.716927  0.2748772  6.227802
##   0.91116276  7.740408  0.2727069  6.247250
##   0.95454846  7.766099  0.2701419  6.268357
##   1.00000000  7.794200  0.2670973  6.291119
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.1123324.

##    alpha    lambda
## 53     1 0.1123324

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Score the filtered-train caret LASSO model on the held-out test set.
if (algo.LASSO.caret) {
  test.model(
    model.LASSO.caret, data.test,
    method = "glmnet", subopt = "LASSO",
    formula = formula, feature.names = feature.names, label.names = label.names,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   107.9   120.5   123.8   123.9   127.4   137.7 
## [1] "glmnet LASSO Test MSE: 95.5780434455831"

LARS with CV (w/ full train)

Train

# Train Least Angle Regression via caret with cross-validation on the
# full training set.
if (algo.LARS.caret) {
  set.seed(1)
  # FIX: subopt was the character string 'NULL' rather than the NULL
  # object; the matching test.model calls elsewhere in this file pass
  # subopt = NULL, so pass the real NULL for consistency.
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "lars",
    subopt = NULL,
    feature.names = feature.names
  )
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.374 on full training set
## Least Angle Regression 
## 
## 6002 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE       Rsquared   MAE     
##   0.00000000  10.755873        NaN  8.183202
##   0.01010101  10.638681  0.1031351  8.099276
##   0.02020202  10.534303  0.1031351  8.025817
##   0.03030303  10.443123  0.1031351  7.961226
##   0.04040404  10.364543  0.1151712  7.904634
##   0.05050505  10.291873  0.1261041  7.851333
##   0.06060606  10.226043  0.1372467  7.800762
##   0.07070707  10.164651  0.1488783  7.751251
##   0.08080808  10.105823  0.1592419  7.702977
##   0.09090909  10.050443  0.1673747  7.657500
##   0.10101010   9.998659  0.1737154  7.613979
##   0.11111111   9.951421  0.1788702  7.572597
##   0.12121212   9.907017  0.1841678  7.533306
##   0.13131313   9.864593  0.1889969  7.495590
##   0.14141414   9.825020  0.1929367  7.459318
##   0.15151515   9.788334  0.1961360  7.424647
##   0.16161616   9.754567  0.1987215  7.391671
##   0.17171717   9.723789  0.2008167  7.360347
##   0.18181818   9.696272  0.2026758  7.331531
##   0.19191919   9.671307  0.2046897  7.305318
##   0.20202020   9.650213  0.2063050  7.282284
##   0.21212121   9.631968  0.2078516  7.262190
##   0.22222222   9.615078  0.2094596  7.244243
##   0.23232323   9.599603  0.2109605  7.227782
##   0.24242424   9.585945  0.2122825  7.213216
##   0.25252525   9.574418  0.2134010  7.200699
##   0.26262626   9.564882  0.2143153  7.190621
##   0.27272727   9.557197  0.2150544  7.182093
##   0.28282828   9.551899  0.2154729  7.175137
##   0.29292929   9.547874  0.2157640  7.169147
##   0.30303030   9.544848  0.2159548  7.164323
##   0.31313131   9.543098  0.2159665  7.160848
##   0.32323232   9.542095  0.2158876  7.158694
##   0.33333333   9.541523  0.2157654  7.156951
##   0.34343434   9.541175  0.2156366  7.155172
##   0.35353535   9.540949  0.2155131  7.153553
##   0.36363636   9.540575  0.2154384  7.151713
##   0.37373737   9.540407  0.2153477  7.150048
##   0.38383838   9.540522  0.2152239  7.148774
##   0.39393939   9.540802  0.2150855  7.147821
##   0.40404040   9.541279  0.2149241  7.147010
##   0.41414141   9.542047  0.2147214  7.146546
##   0.42424242   9.542754  0.2145401  7.146289
##   0.43434343   9.543679  0.2143296  7.146188
##   0.44444444   9.544637  0.2141242  7.146100
##   0.45454545   9.545895  0.2138755  7.146260
##   0.46464646   9.547197  0.2136256  7.146505
##   0.47474747   9.548535  0.2133781  7.146834
##   0.48484848   9.550055  0.2131061  7.147353
##   0.49494949   9.551714  0.2128161  7.148094
##   0.50505051   9.553525  0.2125058  7.148984
##   0.51515152   9.555225  0.2122188  7.149841
##   0.52525253   9.556988  0.2119256  7.150836
##   0.53535354   9.558862  0.2116193  7.152039
##   0.54545455   9.560811  0.2113053  7.153319
##   0.55555556   9.562836  0.2109841  7.154572
##   0.56565657   9.565026  0.2106404  7.155917
##   0.57575758   9.567269  0.2102944  7.157302
##   0.58585859   9.569552  0.2099471  7.158708
##   0.59595960   9.571933  0.2095875  7.160133
##   0.60606061   9.574364  0.2092249  7.161534
##   0.61616162   9.576727  0.2088768  7.162913
##   0.62626263   9.579064  0.2085358  7.164336
##   0.63636364   9.581373  0.2082035  7.165762
##   0.64646465   9.583659  0.2078789  7.167190
##   0.65656566   9.585876  0.2075695  7.168535
##   0.66666667   9.588060  0.2072689  7.169824
##   0.67676768   9.590216  0.2069763  7.171178
##   0.68686869   9.592510  0.2066669  7.172615
##   0.69696970   9.594888  0.2063484  7.174152
##   0.70707071   9.597382  0.2060164  7.175811
##   0.71717172   9.599910  0.2056825  7.177552
##   0.72727273   9.602509  0.2053418  7.179352
##   0.73737374   9.605163  0.2049959  7.181203
##   0.74747475   9.607861  0.2046467  7.183084
##   0.75757576   9.610595  0.2042957  7.185012
##   0.76767677   9.613366  0.2039430  7.186979
##   0.77777778   9.616185  0.2035867  7.189008
##   0.78787879   9.619013  0.2032328  7.191052
##   0.79797980   9.621913  0.2028712  7.193164
##   0.80808081   9.624853  0.2025056  7.195373
##   0.81818182   9.627849  0.2021348  7.197677
##   0.82828283   9.630875  0.2017631  7.200000
##   0.83838384   9.633919  0.2013929  7.202409
##   0.84848485   9.637012  0.2010182  7.204888
##   0.85858586   9.640167  0.2006377  7.207364
##   0.86868687   9.643374  0.2002532  7.209888
##   0.87878788   9.646678  0.1998584  7.212509
##   0.88888889   9.650078  0.1994524  7.215203
##   0.89898990   9.653500  0.1990477  7.217928
##   0.90909091   9.656969  0.1986402  7.220724
##   0.91919192   9.660478  0.1982311  7.223520
##   0.92929293   9.664016  0.1978203  7.226358
##   0.93939394   9.667543  0.1974153  7.229111
##   0.94949495   9.671092  0.1970111  7.231887
##   0.95959596   9.674652  0.1966089  7.234684
##   0.96969697   9.678209  0.1962117  7.237470
##   0.97979798   9.681767  0.1958179  7.240345
##   0.98989899   9.685368  0.1954220  7.243263
##   1.00000000   9.689033  0.1950205  7.246237
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.3737374.

##     fraction
## 38 0.3737374
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Score the caret-trained LARS model on the held-out test set.
if (algo.LARS.caret) {
  test.model(
    model.LARS.caret, data.test,
    method = "lars", subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   110.4   121.8   125.1   125.2   128.7   138.7 
## [1] "lars  Test MSE: 93.0202187622652"

LARS with CV (w/ filtered train)

Train

# Train Least Angle Regression via caret with cross-validation on the
# filtered training set (data.train2).  Overwrites model.LARS.caret.
if (algo.LARS.caret) {
  set.seed(1)
  # FIX: subopt was the character string 'NULL' rather than the NULL
  # object; the matching test.model calls elsewhere in this file pass
  # subopt = NULL, so pass the real NULL for consistency.
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "lars",
    subopt = NULL,
    feature.names = feature.names
  )
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.505 on full training set
## Least Angle Regression 
## 
## 5699 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5130, 5129, 5129, 5128, 5129, 5131, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE      Rsquared   MAE     
##   0.00000000  8.856124        NaN  7.054659
##   0.01010101  8.726158  0.1435527  6.964224
##   0.02020202  8.609544  0.1435527  6.885808
##   0.03030303  8.508613  0.1470622  6.817618
##   0.04040404  8.417056  0.1689636  6.754915
##   0.05050505  8.331572  0.1828876  6.697178
##   0.06060606  8.253892  0.1923703  6.644691
##   0.07070707  8.183425  0.2060264  6.594721
##   0.08080808  8.115942  0.2197014  6.544531
##   0.09090909  8.051513  0.2310247  6.495165
##   0.10101010  7.990863  0.2398913  6.447632
##   0.11111111  7.934749  0.2476784  6.403395
##   0.12121212  7.881291  0.2557757  6.360952
##   0.13131313  7.830710  0.2625711  6.320523
##   0.14141414  7.783252  0.2681305  6.282314
##   0.15151515  7.738975  0.2726591  6.246177
##   0.16161616  7.697933  0.2763324  6.212060
##   0.17171717  7.660483  0.2795140  6.181051
##   0.18181818  7.626599  0.2830240  6.153130
##   0.19191919  7.595486  0.2862114  6.126996
##   0.20202020  7.566952  0.2891919  6.103011
##   0.21212121  7.541293  0.2918636  6.081621
##   0.22222222  7.517885  0.2944476  6.062433
##   0.23232323  7.495457  0.2972456  6.044323
##   0.24242424  7.474121  0.2999830  6.026846
##   0.25252525  7.454134  0.3025815  6.010492
##   0.26262626  7.435387  0.3050091  5.995359
##   0.27272727  7.419356  0.3069726  5.982318
##   0.28282828  7.405620  0.3085604  5.970957
##   0.29292929  7.393813  0.3099135  5.960797
##   0.30303030  7.384307  0.3109927  5.952566
##   0.31313131  7.376311  0.3119170  5.945925
##   0.32323232  7.369392  0.3127398  5.940339
##   0.33333333  7.362810  0.3135630  5.935354
##   0.34343434  7.356311  0.3144408  5.930699
##   0.35353535  7.350311  0.3152681  5.926491
##   0.36363636  7.344714  0.3160502  5.922653
##   0.37373737  7.339475  0.3167820  5.919136
##   0.38383838  7.334422  0.3174971  5.915655
##   0.39393939  7.329794  0.3181510  5.912492
##   0.40404040  7.325647  0.3187316  5.909504
##   0.41414141  7.322043  0.3192234  5.906890
##   0.42424242  7.319021  0.3196113  5.904510
##   0.43434343  7.316705  0.3198735  5.902462
##   0.44444444  7.314726  0.3200894  5.900624
##   0.45454545  7.313086  0.3202537  5.899108
##   0.46464646  7.311851  0.3203559  5.897996
##   0.47474747  7.311095  0.3203773  5.897208
##   0.48484848  7.310544  0.3203740  5.896561
##   0.49494949  7.310258  0.3203291  5.896216
##   0.50505051  7.310254  0.3202390  5.896089
##   0.51515152  7.310502  0.3201106  5.896048
##   0.52525253  7.310834  0.3199748  5.896105
##   0.53535354  7.311296  0.3198212  5.896295
##   0.54545455  7.311953  0.3196387  5.896579
##   0.55555556  7.312799  0.3194277  5.896988
##   0.56565657  7.313842  0.3191879  5.897523
##   0.57575758  7.315034  0.3189254  5.898171
##   0.58585859  7.316285  0.3186568  5.898966
##   0.59595960  7.317596  0.3183831  5.899744
##   0.60606061  7.318943  0.3181084  5.900550
##   0.61616162  7.320442  0.3178121  5.901501
##   0.62626263  7.322133  0.3174850  5.902575
##   0.63636364  7.323942  0.3171412  5.903729
##   0.64646465  7.325773  0.3167990  5.904929
##   0.65656566  7.327576  0.3164672  5.906109
##   0.66666667  7.329372  0.3161426  5.907296
##   0.67676768  7.331233  0.3158106  5.908507
##   0.68686869  7.333100  0.3154827  5.909679
##   0.69696970  7.334919  0.3151677  5.910834
##   0.70707071  7.336834  0.3148393  5.912076
##   0.71717172  7.338832  0.3145013  5.913420
##   0.72727273  7.340944  0.3141478  5.914864
##   0.73737374  7.343126  0.3137867  5.916343
##   0.74747475  7.345449  0.3134042  5.917940
##   0.75757576  7.347798  0.3130207  5.919543
##   0.76767677  7.350178  0.3126347  5.921140
##   0.77777778  7.352620  0.3122407  5.922743
##   0.78787879  7.355062  0.3118510  5.924346
##   0.79797980  7.357514  0.3114633  5.925936
##   0.80808081  7.360072  0.3110605  5.927626
##   0.81818182  7.362709  0.3106474  5.929406
##   0.82828283  7.365358  0.3102349  5.931176
##   0.83838384  7.368049  0.3098183  5.932970
##   0.84848485  7.370784  0.3093986  5.934822
##   0.85858586  7.373524  0.3089830  5.936649
##   0.86868687  7.376262  0.3085721  5.938512
##   0.87878788  7.379041  0.3081568  5.940437
##   0.88888889  7.381895  0.3077333  5.942404
##   0.89898990  7.384736  0.3073160  5.944395
##   0.90909091  7.387598  0.3068993  5.946452
##   0.91919192  7.390523  0.3064754  5.948542
##   0.92929293  7.393581  0.3060315  5.950704
##   0.93939394  7.396696  0.3055813  5.952929
##   0.94949495  7.399823  0.3051326  5.955247
##   0.95959596  7.402998  0.3046801  5.957645
##   0.96969697  7.406213  0.3042248  5.960048
##   0.97979798  7.409451  0.3037702  5.962446
##   0.98989899  7.412744  0.3033102  5.964869
##   1.00000000  7.416075  0.3028485  5.967363
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.5050505.

##     fraction
## 51 0.5050505
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Score the filtered-train caret LARS model on the held-out test set.
if (algo.LARS.caret) {
  test.model(
    model.LARS.caret, data.test,
    method = "lars", subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    draw.limits = TRUE
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   107.8   120.5   123.8   123.9   127.4   137.7 
## [1] "lars  Test MSE: 95.6060942814546"